SDL_gpu_vulkan.c 431 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
142211423114241142511426114271142811429114301143111432114331143411435114361143711438114391144011441114421144311444114451144611447114481144911450114511145211453114541145511456114571145811459114601146111462114631146411465114661146711468114691147011471114721147311474114751147611477114781147911480114811148211483114841148511486114871148811489114901149111492114931149411495114961149711498114991150011501115021150311504115051150611507115081150911510115111151211513115141151511516115171151811519115201152111522115231152411525115261152711528115291153011531115321153311534115351153611537115381153911540115411154211543115441154511546115471154811549115501155111552115531155411555115561155711558115591156011561115621156311564115651156611567115681156911570115711157211573115741157511576115771157811579115801158111582115831158411585115861158711588115891159011591115921159311594115951159611597115981159911600116011160211603116041160511606116071160811609116101161111612116131161411615116161161711618116191162011621116221162311624116251162611627116281162911630116311163211633116341163511636116371163811639116401164111642116431164411645116461164711648116491165011651116521165311654116551165611657116581165911660116611166211663116641166511666116671166811669116701167111672116731167411675116761167711678116791168011681116821168311684116851168611687116881168911690116911169211693116941169511696116971169811699117001170111702117031170411705117061170711708117091171011711117121171311714117151171611717117181171911720117211172211723117241172511726117271172811729117301173111732117331173411735117361173711738117391174011741117421174311744117451174611747117481174911750117511175211753117541175511756117571175811759117601176111762117631176411765117661176711768117691177011771117721177311774117751177611777117781177911780117811178211783117841178511786117871178811789117901179111792117931179411795117961179711798
  1. /*
  2. Simple DirectMedia Layer
  3. Copyright (C) 1997-2024 Sam Lantinga <slouken@libsdl.org>
  4. This software is provided 'as-is', without any express or implied
  5. warranty. In no event will the authors be held liable for any damages
  6. arising from the use of this software.
  7. Permission is granted to anyone to use this software for any purpose,
  8. including commercial applications, and to alter it and redistribute it
  9. freely, subject to the following restrictions:
  10. 1. The origin of this software must not be misrepresented; you must not
  11. claim that you wrote the original software. If you use this software
  12. in a product, an acknowledgment in the product documentation would be
  13. appreciated but is not required.
  14. 2. Altered source versions must be plainly marked as such, and must not be
  15. misrepresented as being the original software.
  16. 3. This notice may not be removed or altered from any source distribution.
  17. */
  18. #include "SDL_internal.h"
  19. #if SDL_GPU_VULKAN
  20. // Needed for VK_KHR_portability_subset
  21. #define VK_ENABLE_BETA_EXTENSIONS
  22. #define VK_NO_PROTOTYPES
  23. #include "../../video/khronos/vulkan/vulkan.h"
  24. #include "SDL_hashtable.h"
  25. #include <SDL3/SDL_vulkan.h>
  26. #include "../SDL_sysgpu.h"
// Clamp helper built on SDL_min/SDL_max.
// NOTE(review): arguments are expanded more than once — only pass
// side-effect-free expressions.
#define VULKAN_INTERNAL_clamp(val, min, max) SDL_max(min, SDL_min(val, max))
// Global Vulkan Loader Entry Points
// Entry point used to resolve all other Vulkan functions; loaded at
// runtime (resolution site is outside this chunk).
static PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = NULL;
// Declares a file-scope function pointer for each global-scope Vulkan
// entry point listed in the X-macro header included below.
#define VULKAN_GLOBAL_FUNCTION(name) \
    static PFN_##name name = NULL;
#include "SDL_gpu_vulkan_vkfuncs.h"
// Per-device extension availability flags (nonzero when the extension is
// supported/enabled — presumably set during device selection; confirm at
// the population site, which is outside this chunk).
typedef struct VulkanExtensions
{
    // These extensions are required!
    // Globally supported
    Uint8 KHR_swapchain;
    // Core since 1.1, needed for negative VkViewport::height
    Uint8 KHR_maintenance1;
    // These extensions are optional!
    // Core since 1.2, but requires annoying paperwork to implement
    Uint8 KHR_driver_properties;
    // EXT, probably not going to be Core
    Uint8 EXT_vertex_attribute_divisor;
    // Only required for special implementations (i.e. MoltenVK)
    Uint8 KHR_portability_subset;
} VulkanExtensions;
// Defines
// Allocation-size tuning constants for the sub-allocator below.
#define SMALL_ALLOCATION_THRESHOLD 2097152  // 2 MiB
#define SMALL_ALLOCATION_SIZE 16777216      // 16 MiB
#define LARGE_ALLOCATION_INCREMENT 67108864 // 64 MiB
#define MAX_UBO_SECTION_SIZE 4096           // 4 KiB
#define DESCRIPTOR_POOL_STARTING_SIZE 128
// SDL property key used to attach per-window swapchain data to an SDL_Window.
#define WINDOW_PROPERTY_DATA "SDL_GPUVulkanWindowPropertyData"
// VkComponentMapping initializer that leaves all four channels unchanged.
#define IDENTITY_SWIZZLE               \
    {                                  \
        VK_COMPONENT_SWIZZLE_IDENTITY, \
        VK_COMPONENT_SWIZZLE_IDENTITY, \
        VK_COMPONENT_SWIZZLE_IDENTITY, \
        VK_COMPONENT_SWIZZLE_IDENTITY  \
    }
// Typed null handles, for readability at call sites.
#define NULL_DESC_LAYOUT     (VkDescriptorSetLayout)0
#define NULL_PIPELINE_LAYOUT (VkPipelineLayout)0
#define NULL_RENDER_PASS     (SDL_GPURenderPass *)0
// Grows arr->elements (doubling capacity, seeding with initialValue on
// first use) when the array is full.
// NOTE(review): the SDL_realloc result is assigned straight back to
// arr->elements, so an allocation failure (NULL return) is not handled and
// would leak the old buffer. Arguments are expanded multiple times — pass
// side-effect-free expressions only.
#define EXPAND_ELEMENTS_IF_NEEDED(arr, initialValue, type) \
    if (arr->count == arr->capacity) {                     \
        if (arr->capacity == 0) {                          \
            arr->capacity = initialValue;                  \
        } else {                                           \
            arr->capacity *= 2;                            \
        }                                                  \
        arr->elements = (type *)SDL_realloc(               \
            arr->elements,                                 \
            arr->capacity * sizeof(type));                 \
    }
// Grows a bare array to newCapacity when newCount would not fit in the
// current capacity. Caller chooses the growth policy via newCapacity.
// NOTE(review): like EXPAND_ELEMENTS_IF_NEEDED, the SDL_realloc result is
// not checked for NULL before overwriting arr.
#define EXPAND_ARRAY_IF_NEEDED(arr, elementType, newCount, capacity, newCapacity) \
    if (newCount >= capacity) {                                                   \
        capacity = newCapacity;                                                   \
        arr = (elementType *)SDL_realloc(                                         \
            arr,                                                                  \
            sizeof(elementType) * capacity);                                      \
    }
// Copies srcCount elements from srcArr into dstArr, sets dstCount to the
// copied count, then marks the source array empty. `i` is a caller-provided
// loop variable that the macro assigns. The destination must already be
// large enough to hold srcCount elements.
#define MOVE_ARRAY_CONTENTS_AND_RESET(i, dstArr, dstCount, srcArr, srcCount) \
    for (i = 0; i < srcCount; i += 1) {                                      \
        dstArr[i] = srcArr[i];                                               \
    }                                                                        \
    dstCount = srcCount;                                                     \
    srcCount = 0;
  89. // Conversions
// Physical-device desirability scores, indexed by VkPhysicalDeviceType
// (higher is preferred). Discrete GPUs win when high performance is wanted.
static const Uint8 DEVICE_PRIORITY_HIGHPERFORMANCE[] = {
    0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
    3, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
    4, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
    2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
    1  // VK_PHYSICAL_DEVICE_TYPE_CPU
};
// Same table, but integrated GPUs are preferred for low power draw.
static const Uint8 DEVICE_PRIORITY_LOWPOWER[] = {
    0, // VK_PHYSICAL_DEVICE_TYPE_OTHER
    4, // VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU
    3, // VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU
    2, // VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU
    1  // VK_PHYSICAL_DEVICE_TYPE_CPU
};
// Indexed by SDL_GPUPresentMode — presumably VSYNC / IMMEDIATE / MAILBOX
// in that order; the SDL enum is declared outside this chunk.
static VkPresentModeKHR SDLToVK_PresentMode[] = {
    VK_PRESENT_MODE_FIFO_KHR,
    VK_PRESENT_MODE_IMMEDIATE_KHR,
    VK_PRESENT_MODE_MAILBOX_KHR
};
// VkFormat for each SDL_GPUTextureFormat, indexed by the SDL enum value.
// Entries whose Vulkan channel order differs from the SDL name (B5G6R5,
// B5G5R5A1, A8) are corrected via SDLToVK_SurfaceSwizzle below.
static VkFormat SDLToVK_SurfaceFormat[] = {
    VK_FORMAT_R8G8B8A8_UNORM,           // R8G8B8A8_UNORM
    VK_FORMAT_B8G8R8A8_UNORM,           // B8G8R8A8_UNORM
    VK_FORMAT_R5G6B5_UNORM_PACK16,      // B5G6R5_UNORM (reordered by swizzle)
    VK_FORMAT_A1R5G5B5_UNORM_PACK16,    // B5G5R5A1_UNORM (reordered by swizzle)
    VK_FORMAT_B4G4R4A4_UNORM_PACK16,    // B4G4R4A4_UNORM
    VK_FORMAT_A2B10G10R10_UNORM_PACK32, // R10G10B10A2_UNORM
    VK_FORMAT_R16G16_UNORM,             // R16G16_UNORM
    VK_FORMAT_R16G16B16A16_UNORM,       // R16G16B16A16_UNORM
    VK_FORMAT_R8_UNORM,                 // R8_UNORM
    VK_FORMAT_R8_UNORM,                 // A8_UNORM (alpha sourced from R via swizzle)
    VK_FORMAT_BC1_RGBA_UNORM_BLOCK,     // BC1_UNORM
    VK_FORMAT_BC2_UNORM_BLOCK,          // BC2_UNORM
    VK_FORMAT_BC3_UNORM_BLOCK,          // BC3_UNORM
    VK_FORMAT_BC7_UNORM_BLOCK,          // BC7_UNORM
    VK_FORMAT_R8G8_SNORM,               // R8G8_SNORM
    VK_FORMAT_R8G8B8A8_SNORM,           // R8G8B8A8_SNORM
    VK_FORMAT_R16_SFLOAT,               // R16_FLOAT
    VK_FORMAT_R16G16_SFLOAT,            // R16G16_FLOAT
    VK_FORMAT_R16G16B16A16_SFLOAT,      // R16G16B16A16_FLOAT
    VK_FORMAT_R32_SFLOAT,               // R32_FLOAT
    VK_FORMAT_R32G32_SFLOAT,            // R32G32_FLOAT
    VK_FORMAT_R32G32B32A32_SFLOAT,      // R32G32B32A32_FLOAT
    VK_FORMAT_R8_UINT,                  // R8_UINT
    VK_FORMAT_R8G8_UINT,                // R8G8_UINT
    VK_FORMAT_R8G8B8A8_UINT,            // R8G8B8A8_UINT
    VK_FORMAT_R16_UINT,                 // R16_UINT
    VK_FORMAT_R16G16_UINT,              // R16G16_UINT
    VK_FORMAT_R16G16B16A16_UINT,        // R16G16B16A16_UINT
    VK_FORMAT_R8G8B8A8_SRGB,            // R8G8B8A8_UNORM_SRGB
    VK_FORMAT_B8G8R8A8_SRGB,            // B8G8R8A8_UNORM_SRGB
    VK_FORMAT_BC3_SRGB_BLOCK,           // BC3_UNORM_SRGB
    VK_FORMAT_BC7_SRGB_BLOCK,           // BC7_UNORM_SRGB
    VK_FORMAT_D16_UNORM,                // D16_UNORM
    VK_FORMAT_X8_D24_UNORM_PACK32,      // D24_UNORM
    VK_FORMAT_D32_SFLOAT,               // D32_FLOAT
    VK_FORMAT_D24_UNORM_S8_UINT,        // D24_UNORM_S8_UINT
    VK_FORMAT_D32_SFLOAT_S8_UINT,       // D32_FLOAT_S8_UINT
};
// Keeps the table in lockstep with the SDL_GPUTextureFormat enum.
SDL_COMPILE_TIME_ASSERT(SDLToVK_SurfaceFormat, SDL_arraysize(SDLToVK_SurfaceFormat) == SDL_GPU_TEXTUREFORMAT_MAX);
// Per-format VkComponentMapping, indexed identically to SDLToVK_SurfaceFormat.
// Non-identity entries fix up formats whose VkFormat channel order differs
// from the SDL-side name (B5G6R5, B5G5R5A1) or that are emulated (A8 on R8).
static VkComponentMapping SDLToVK_SurfaceSwizzle[] = {
    IDENTITY_SWIZZLE, // R8G8B8A8
    IDENTITY_SWIZZLE, // B8G8R8A8
    {
        // B5G6R5: VkFormat is R5G6B5, so swap R and B
        VK_COMPONENT_SWIZZLE_B,
        VK_COMPONENT_SWIZZLE_G,
        VK_COMPONENT_SWIZZLE_R,
        VK_COMPONENT_SWIZZLE_ONE,
    },
    {
        // B5G5R5A1: VkFormat is A1R5G5B5, so swap R and B
        VK_COMPONENT_SWIZZLE_B,
        VK_COMPONENT_SWIZZLE_G,
        VK_COMPONENT_SWIZZLE_R,
        VK_COMPONENT_SWIZZLE_A,
    },
    IDENTITY_SWIZZLE, // B4G4R4A4
    {
        // R10G10B10A2
        VK_COMPONENT_SWIZZLE_R,
        VK_COMPONENT_SWIZZLE_G,
        VK_COMPONENT_SWIZZLE_B,
        VK_COMPONENT_SWIZZLE_A,
    },
    IDENTITY_SWIZZLE, // R16G16
    IDENTITY_SWIZZLE, // R16G16B16A16
    IDENTITY_SWIZZLE, // R8
    {
        // A8: backed by VK_FORMAT_R8_UNORM, alpha read from the R channel
        VK_COMPONENT_SWIZZLE_ZERO,
        VK_COMPONENT_SWIZZLE_ZERO,
        VK_COMPONENT_SWIZZLE_ZERO,
        VK_COMPONENT_SWIZZLE_R,
    },
    IDENTITY_SWIZZLE, // BC1
    IDENTITY_SWIZZLE, // BC2
    IDENTITY_SWIZZLE, // BC3
    IDENTITY_SWIZZLE, // BC7
    IDENTITY_SWIZZLE, // R8G8_SNORM
    IDENTITY_SWIZZLE, // R8G8B8A8_SNORM
    IDENTITY_SWIZZLE, // R16_SFLOAT
    IDENTITY_SWIZZLE, // R16G16_SFLOAT
    IDENTITY_SWIZZLE, // R16G16B16A16_SFLOAT
    IDENTITY_SWIZZLE, // R32_SFLOAT
    IDENTITY_SWIZZLE, // R32G32_SFLOAT
    IDENTITY_SWIZZLE, // R32G32B32A32_SFLOAT
    IDENTITY_SWIZZLE, // R8_UINT
    IDENTITY_SWIZZLE, // R8G8_UINT
    IDENTITY_SWIZZLE, // R8G8B8A8_UINT
    IDENTITY_SWIZZLE, // R16_UINT
    IDENTITY_SWIZZLE, // R16G16_UINT
    IDENTITY_SWIZZLE, // R16G16B16A16_UINT
    IDENTITY_SWIZZLE, // R8G8B8A8_SRGB
    IDENTITY_SWIZZLE, // B8G8R8A8_SRGB
    IDENTITY_SWIZZLE, // BC3_SRGB
    IDENTITY_SWIZZLE, // BC7_SRGB
    IDENTITY_SWIZZLE, // D16_UNORM
    IDENTITY_SWIZZLE, // D24_UNORM
    IDENTITY_SWIZZLE, // D32_SFLOAT
    IDENTITY_SWIZZLE, // D24_UNORM_S8_UINT
    IDENTITY_SWIZZLE, // D32_SFLOAT_S8_UINT
};
// Preferred swapchain VkFormat per SDL_GPUSwapchainComposition.
static VkFormat SwapchainCompositionToFormat[] = {
    VK_FORMAT_B8G8R8A8_UNORM,          // SDR
    VK_FORMAT_B8G8R8A8_SRGB,           // SDR_LINEAR
    VK_FORMAT_R16G16B16A16_SFLOAT,     // HDR_EXTENDED_LINEAR
    VK_FORMAT_A2B10G10R10_UNORM_PACK32 // HDR10_ST2048
};
// Fallback VkFormat (RGBA instead of BGRA) tried when the preferred format
// above is unavailable; same indexing. The HDR entries have no fallback.
static VkFormat SwapchainCompositionToFallbackFormat[] = {
    VK_FORMAT_R8G8B8A8_UNORM, // SDR
    VK_FORMAT_R8G8B8A8_SRGB,  // SDR_LINEAR
    VK_FORMAT_UNDEFINED,      // no fallback
    VK_FORMAT_UNDEFINED       // no fallback
};
  224. static SDL_GPUTextureFormat SwapchainCompositionToSDLFormat(
  225. SDL_GPUSwapchainComposition composition,
  226. bool usingFallback)
  227. {
  228. switch (composition) {
  229. case SDL_GPU_SWAPCHAINCOMPOSITION_SDR:
  230. return usingFallback ? SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM;
  231. case SDL_GPU_SWAPCHAINCOMPOSITION_SDR_LINEAR:
  232. return usingFallback ? SDL_GPU_TEXTUREFORMAT_R8G8B8A8_UNORM_SRGB : SDL_GPU_TEXTUREFORMAT_B8G8R8A8_UNORM_SRGB;
  233. case SDL_GPU_SWAPCHAINCOMPOSITION_HDR_EXTENDED_LINEAR:
  234. return SDL_GPU_TEXTUREFORMAT_R16G16B16A16_FLOAT;
  235. case SDL_GPU_SWAPCHAINCOMPOSITION_HDR10_ST2048:
  236. return SDL_GPU_TEXTUREFORMAT_R10G10B10A2_UNORM;
  237. default:
  238. return SDL_GPU_TEXTUREFORMAT_INVALID;
  239. }
  240. }
// VkColorSpaceKHR requested for each SDL_GPUSwapchainComposition.
static VkColorSpaceKHR SwapchainCompositionToColorSpace[] = {
    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,       // SDR
    VK_COLOR_SPACE_SRGB_NONLINEAR_KHR,       // SDR_LINEAR
    VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT, // HDR_EXTENDED_LINEAR
    VK_COLOR_SPACE_HDR10_ST2084_EXT          // HDR10_ST2048
};
// Swapchain image view swizzle per composition; all effectively identity.
static VkComponentMapping SwapchainCompositionSwizzle[] = {
    IDENTITY_SWIZZLE, // SDR
    IDENTITY_SWIZZLE, // SDR_SRGB
    IDENTITY_SWIZZLE, // HDR
    {
        // HDR_ADVANCED
        VK_COMPONENT_SWIZZLE_R,
        VK_COMPONENT_SWIZZLE_G,
        VK_COMPONENT_SWIZZLE_B,
        VK_COMPONENT_SWIZZLE_A,
    }
};
// VkFormat for each SDL vertex element format (index = SDL enum value).
static VkFormat SDLToVK_VertexFormat[] = {
    VK_FORMAT_R32_SINT,            // INT
    VK_FORMAT_R32G32_SINT,         // INT2
    VK_FORMAT_R32G32B32_SINT,      // INT3
    VK_FORMAT_R32G32B32A32_SINT,   // INT4
    VK_FORMAT_R32_UINT,            // UINT
    VK_FORMAT_R32G32_UINT,         // UINT2
    VK_FORMAT_R32G32B32_UINT,      // UINT3
    VK_FORMAT_R32G32B32A32_UINT,   // UINT4
    VK_FORMAT_R32_SFLOAT,          // FLOAT
    VK_FORMAT_R32G32_SFLOAT,       // FLOAT2
    VK_FORMAT_R32G32B32_SFLOAT,    // FLOAT3
    VK_FORMAT_R32G32B32A32_SFLOAT, // FLOAT4
    VK_FORMAT_R8G8_SINT,           // BYTE2
    VK_FORMAT_R8G8B8A8_SINT,       // BYTE4
    VK_FORMAT_R8G8_UINT,           // UBYTE2
    VK_FORMAT_R8G8B8A8_UINT,       // UBYTE4
    VK_FORMAT_R8G8_SNORM,          // BYTE2_NORM
    VK_FORMAT_R8G8B8A8_SNORM,      // BYTE4_NORM
    VK_FORMAT_R8G8_UNORM,          // UBYTE2_NORM
    VK_FORMAT_R8G8B8A8_UNORM,      // UBYTE4_NORM
    VK_FORMAT_R16G16_SINT,         // SHORT2
    VK_FORMAT_R16G16B16A16_SINT,   // SHORT4
    VK_FORMAT_R16G16_UINT,         // USHORT2
    VK_FORMAT_R16G16B16A16_UINT,   // USHORT4
    VK_FORMAT_R16G16_SNORM,        // SHORT2_NORM
    VK_FORMAT_R16G16B16A16_SNORM,  // SHORT4_NORM
    VK_FORMAT_R16G16_UNORM,        // USHORT2_NORM
    VK_FORMAT_R16G16B16A16_UNORM,  // USHORT4_NORM
    VK_FORMAT_R16G16_SFLOAT,       // HALF2
    VK_FORMAT_R16G16B16A16_SFLOAT  // HALF4
};
// Indexed by SDL index element size (16-bit, 32-bit).
static VkIndexType SDLToVK_IndexType[] = {
    VK_INDEX_TYPE_UINT16,
    VK_INDEX_TYPE_UINT32
};
// Indexed by SDL primitive type.
static VkPrimitiveTopology SDLToVK_PrimitiveType[] = {
    VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
    VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
    VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
    VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
};
// Indexed by SDL cull mode.
static VkCullModeFlags SDLToVK_CullMode[] = {
    VK_CULL_MODE_NONE,
    VK_CULL_MODE_FRONT_BIT,
    VK_CULL_MODE_BACK_BIT,
    VK_CULL_MODE_FRONT_AND_BACK
};
// Indexed by SDL front-face winding order.
static VkFrontFace SDLToVK_FrontFace[] = {
    VK_FRONT_FACE_COUNTER_CLOCKWISE,
    VK_FRONT_FACE_CLOCKWISE
};
// Indexed by SDL blend factor.
static VkBlendFactor SDLToVK_BlendFactor[] = {
    VK_BLEND_FACTOR_ZERO,
    VK_BLEND_FACTOR_ONE,
    VK_BLEND_FACTOR_SRC_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
    VK_BLEND_FACTOR_DST_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
    VK_BLEND_FACTOR_SRC_ALPHA,
    VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
    VK_BLEND_FACTOR_DST_ALPHA,
    VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
    VK_BLEND_FACTOR_CONSTANT_COLOR,
    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
    VK_BLEND_FACTOR_CONSTANT_ALPHA,
    VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
    VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
};
// Indexed by SDL blend operation.
static VkBlendOp SDLToVK_BlendOp[] = {
    VK_BLEND_OP_ADD,
    VK_BLEND_OP_SUBTRACT,
    VK_BLEND_OP_REVERSE_SUBTRACT,
    VK_BLEND_OP_MIN,
    VK_BLEND_OP_MAX
};
// Indexed by SDL compare operation.
static VkCompareOp SDLToVK_CompareOp[] = {
    VK_COMPARE_OP_NEVER,
    VK_COMPARE_OP_LESS,
    VK_COMPARE_OP_EQUAL,
    VK_COMPARE_OP_LESS_OR_EQUAL,
    VK_COMPARE_OP_GREATER,
    VK_COMPARE_OP_NOT_EQUAL,
    VK_COMPARE_OP_GREATER_OR_EQUAL,
    VK_COMPARE_OP_ALWAYS
};
// Indexed by SDL stencil operation.
static VkStencilOp SDLToVK_StencilOp[] = {
    VK_STENCIL_OP_KEEP,
    VK_STENCIL_OP_ZERO,
    VK_STENCIL_OP_REPLACE,
    VK_STENCIL_OP_INCREMENT_AND_CLAMP,
    VK_STENCIL_OP_DECREMENT_AND_CLAMP,
    VK_STENCIL_OP_INVERT,
    VK_STENCIL_OP_INCREMENT_AND_WRAP,
    VK_STENCIL_OP_DECREMENT_AND_WRAP
};
// Indexed by SDL attachment load operation.
static VkAttachmentLoadOp SDLToVK_LoadOp[] = {
    VK_ATTACHMENT_LOAD_OP_LOAD,
    VK_ATTACHMENT_LOAD_OP_CLEAR,
    VK_ATTACHMENT_LOAD_OP_DONT_CARE
};
// Indexed by SDL attachment store operation.
static VkAttachmentStoreOp SDLToVK_StoreOp[] = {
    VK_ATTACHMENT_STORE_OP_STORE,
    VK_ATTACHMENT_STORE_OP_DONT_CARE
};
// Indexed by SDL sample count (1, 2, 4, 8).
static VkSampleCountFlagBits SDLToVK_SampleCount[] = {
    VK_SAMPLE_COUNT_1_BIT,
    VK_SAMPLE_COUNT_2_BIT,
    VK_SAMPLE_COUNT_4_BIT,
    VK_SAMPLE_COUNT_8_BIT
};
// Indexed by SDL vertex input rate (per-vertex, per-instance).
static VkVertexInputRate SDLToVK_VertexInputRate[] = {
    VK_VERTEX_INPUT_RATE_VERTEX,
    VK_VERTEX_INPUT_RATE_INSTANCE
};
// Indexed by SDL sampler filter (nearest, linear).
static VkFilter SDLToVK_Filter[] = {
    VK_FILTER_NEAREST,
    VK_FILTER_LINEAR
};
// Indexed by SDL sampler mipmap mode.
static VkSamplerMipmapMode SDLToVK_SamplerMipmapMode[] = {
    VK_SAMPLER_MIPMAP_MODE_NEAREST,
    VK_SAMPLER_MIPMAP_MODE_LINEAR
};
// Indexed by SDL sampler address mode.
static VkSamplerAddressMode SDLToVK_SamplerAddressMode[] = {
    VK_SAMPLER_ADDRESS_MODE_REPEAT,
    VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
    VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE
};
  388. // Structures
// Forward declarations for the memory/resource types defined further below
// (some bodies are outside this chunk).
typedef struct VulkanMemoryAllocation VulkanMemoryAllocation;
typedef struct VulkanBuffer VulkanBuffer;
typedef struct VulkanBufferContainer VulkanBufferContainer;
typedef struct VulkanTexture VulkanTexture;
typedef struct VulkanTextureContainer VulkanTextureContainer;
// A VkFence paired with a reference count tracking its users.
typedef struct VulkanFenceHandle
{
    VkFence fence;
    SDL_AtomicInt referenceCount;
} VulkanFenceHandle;
// Memory Allocation
// A free span inside a VulkanMemoryAllocation block.
typedef struct VulkanMemoryFreeRegion
{
    VulkanMemoryAllocation *allocation; // owning allocation block
    VkDeviceSize offset;
    VkDeviceSize size;
    Uint32 allocationIndex; // presumably index into allocation->freeRegions — confirm at use site
    Uint32 sortedIndex;     // presumably index into the suballocator's sortedFreeRegions — confirm at use site
} VulkanMemoryFreeRegion;
// An in-use span inside a VulkanMemoryAllocation, bound to one resource.
typedef struct VulkanMemoryUsedRegion
{
    VulkanMemoryAllocation *allocation; // owning allocation block
    VkDeviceSize offset;
    VkDeviceSize size;
    VkDeviceSize resourceOffset; // differs from offset based on alignment
    VkDeviceSize resourceSize; // differs from size based on alignment
    VkDeviceSize alignment;
    Uint8 isBuffer; // selects which union member below is valid
    union
    {
        VulkanBuffer *vulkanBuffer;
        VulkanTexture *vulkanTexture;
    };
} VulkanMemoryUsedRegion;
// Per-memory-type allocator: owns all VkDeviceMemory allocations of one
// memory type plus a size-sorted view of their free regions.
typedef struct VulkanMemorySubAllocator
{
    Uint32 memoryTypeIndex;
    VulkanMemoryAllocation **allocations;
    Uint32 allocationCount;
    VulkanMemoryFreeRegion **sortedFreeRegions; // sorted across all allocations
    Uint32 sortedFreeRegionCount;
    Uint32 sortedFreeRegionCapacity;
} VulkanMemorySubAllocator;

// One VkDeviceMemory block, subdivided into used and free regions.
struct VulkanMemoryAllocation
{
    VulkanMemorySubAllocator *allocator; // back-pointer to the owning sub-allocator
    VkDeviceMemory memory;
    VkDeviceSize size;
    VulkanMemoryUsedRegion **usedRegions;
    Uint32 usedRegionCount;
    Uint32 usedRegionCapacity;
    VulkanMemoryFreeRegion **freeRegions;
    Uint32 freeRegionCount;
    Uint32 freeRegionCapacity;
    Uint8 availableForAllocation; // cleared when the allocation is queued for defrag
    VkDeviceSize freeSpace;
    VkDeviceSize usedSpace;
    Uint8 *mapPointer; // host-visible mapping, when mapped
    SDL_Mutex *memoryLock;
};

// Top-level allocator: one sub-allocator per Vulkan memory type.
typedef struct VulkanMemoryAllocator
{
    VulkanMemorySubAllocator subAllocators[VK_MAX_MEMORY_TYPES];
} VulkanMemoryAllocator;
// Memory structures
/* We use pointer indirection so that defrag can occur without objects
 * needing to be aware of the backing buffers changing.
 */
typedef struct VulkanBufferHandle
{
    VulkanBuffer *vulkanBuffer;
    VulkanBufferContainer *container;
} VulkanBufferHandle;

typedef enum VulkanBufferType
{
    VULKAN_BUFFER_TYPE_GPU,
    VULKAN_BUFFER_TYPE_UNIFORM,
    VULKAN_BUFFER_TYPE_TRANSFER
} VulkanBufferType;

// A single VkBuffer plus its backing memory region and usage tracking.
struct VulkanBuffer
{
    VkBuffer buffer;
    VkDeviceSize size;
    VulkanMemoryUsedRegion *usedRegion;
    VulkanBufferType type;
    SDL_GPUBufferUsageFlags usageFlags;
    SDL_AtomicInt referenceCount; // Tracks command buffer usage
    VulkanBufferHandle *handle;
    bool transitioned; // whether a first barrier has moved it out of its initial state
    Uint8 markedForDestroy; // so that defrag doesn't double-free
};
/* Buffer resources consist of multiple backing buffer handles so that data transfers
 * can occur without blocking or the client having to manage extra resources.
 *
 * Cast from SDL_GPUBuffer or SDL_GPUTransferBuffer.
 */
struct VulkanBufferContainer
{
    VulkanBufferHandle *activeBufferHandle; // handle currently presented to the client
    /* These are all the buffer handles that have been used by this container.
     * If the resource is bound and then updated with a cycle parameter, a new resource
     * will be added to this list.
     * These can be reused after they are submitted and command processing is complete.
     */
    Uint32 bufferCapacity;
    Uint32 bufferCount;
    VulkanBufferHandle **bufferHandles;
    char *debugName;
};
// Renderer Structure

// Queue family indices discovered during device selection.
typedef struct QueueFamilyIndices
{
    Uint32 graphicsFamily;
    Uint32 presentFamily;
    Uint32 computeFamily;
    Uint32 transferFamily;
} QueueFamilyIndices;

// Reference-counted VkSampler wrapper.
typedef struct VulkanSampler
{
    VkSampler sampler;
    SDL_AtomicInt referenceCount;
} VulkanSampler;

// Shader module plus the resource counts needed to build pipeline layouts.
typedef struct VulkanShader
{
    VkShaderModule shaderModule;
    const char *entryPointName;
    Uint32 samplerCount;
    Uint32 storageTextureCount;
    Uint32 storageBufferCount;
    Uint32 uniformBufferCount;
    SDL_AtomicInt referenceCount;
} VulkanShader;
// Indirection handle so defrag can swap the backing texture under the client.
typedef struct VulkanTextureHandle
{
    VulkanTexture *vulkanTexture;
    VulkanTextureContainer *container;
} VulkanTextureHandle;

/* Textures are made up of individual subresources.
 * This helps us barrier the resource efficiently.
 */
typedef struct VulkanTextureSubresource
{
    VulkanTexture *parent;
    Uint32 layer;
    Uint32 level;
    VkImageView *renderTargetViews; // One render target view per depth slice
    VkImageView computeWriteView;
    VkImageView depthStencilView;
    VulkanTextureHandle *msaaTexHandle; // NULL if parent sample count is 1 or is depth target
    bool transitioned; // used for layout tracking
} VulkanTextureSubresource;
// A single VkImage, its views, backing memory, and per-subresource state.
struct VulkanTexture
{
    VulkanMemoryUsedRegion *usedRegion;
    VkImage image;
    VkImageView fullView; // used for samplers and storage reads
    VkExtent2D dimensions;
    SDL_GPUTextureType type;
    Uint8 isMSAAColorTarget;
    Uint32 depth;
    Uint32 layerCount;
    Uint32 levelCount;
    VkSampleCountFlagBits sampleCount; // NOTE: This refers to the sample count of a render target pass using this texture, not the actual sample count of the texture
    VkFormat format;
    VkComponentMapping swizzle;
    SDL_GPUTextureUsageFlags usageFlags;
    VkImageAspectFlags aspectFlags;
    Uint32 subresourceCount; // layerCount * levelCount entries in subresources — TODO confirm
    VulkanTextureSubresource *subresources;
    VulkanTextureHandle *handle;
    Uint8 markedForDestroy; // so that defrag doesn't double-free
    SDL_AtomicInt referenceCount;
};
/* Texture resources consist of multiple backing texture handles so that data transfers
 * can occur without blocking or the client having to manage extra resources.
 *
 * Cast from SDL_GPUTexture.
 */
struct VulkanTextureContainer
{
    TextureCommonHeader header; // FIXME: Use this instead of passing so many args to CreateTexture
    VulkanTextureHandle *activeTextureHandle; // handle currently presented to the client
    /* These are all the texture handles that have been used by this container.
     * If the resource is bound and then updated with CYCLE, a new resource
     * will be added to this list.
     * These can be reused after they are submitted and command processing is complete.
     */
    Uint32 textureCapacity;
    Uint32 textureCount;
    VulkanTextureHandle **textureHandles;
    // Swapchain images cannot be cycled
    Uint8 canBeCycled;
    char *debugName;
};
// Logical access states used to pick barrier parameters for buffers.
typedef enum VulkanBufferUsageMode
{
    VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
    VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
    VULKAN_BUFFER_USAGE_MODE_VERTEX_READ,
    VULKAN_BUFFER_USAGE_MODE_INDEX_READ,
    VULKAN_BUFFER_USAGE_MODE_INDIRECT,
    VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ,
    VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
    VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
} VulkanBufferUsageMode;

// Logical access states used to pick barrier/layout parameters for textures.
typedef enum VulkanTextureUsageMode
{
    VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
    VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
    VULKAN_TEXTURE_USAGE_MODE_SAMPLER,
    VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ,
    VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
    VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
    VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
    VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT,
    VULKAN_TEXTURE_USAGE_MODE_PRESENT
} VulkanTextureUsageMode;

// Which shader stage a uniform buffer binding belongs to.
typedef enum VulkanUniformBufferStage
{
    VULKAN_UNIFORM_BUFFER_STAGE_VERTEX,
    VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT,
    VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE
} VulkanUniformBufferStage;
// Reference-counted VkFramebuffer wrapper.
typedef struct VulkanFramebuffer
{
    VkFramebuffer framebuffer;
    SDL_AtomicInt referenceCount;
} VulkanFramebuffer;

// Everything owned per-swapchain: surface, images, and per-frame sync objects.
typedef struct VulkanSwapchainData
{
    // Window surface
    VkSurfaceKHR surface;
    // Swapchain for window surface
    VkSwapchainKHR swapchain;
    VkFormat format;
    VkColorSpaceKHR colorSpace;
    VkComponentMapping swapchainSwizzle;
    VkPresentModeKHR presentMode;
    bool usingFallbackFormat;
    // Swapchain images
    VulkanTextureContainer *textureContainers; // use containers so that swapchain textures can use the same API as other textures
    Uint32 imageCount;
    // Synchronization primitives
    VkSemaphore imageAvailableSemaphore[MAX_FRAMES_IN_FLIGHT];
    VkSemaphore renderFinishedSemaphore[MAX_FRAMES_IN_FLIGHT];
    VulkanFenceHandle *inFlightFences[MAX_FRAMES_IN_FLIGHT];
    Uint32 frameCounter; // indexes the per-frame arrays above
} VulkanSwapchainData;
// Per-claimed-window state; owns the swapchain for that window.
typedef struct WindowData
{
    SDL_Window *window;
    SDL_GPUSwapchainComposition swapchainComposition;
    SDL_GPUPresentMode presentMode;
    VulkanSwapchainData *swapchainData;
    bool needsSwapchainRecreate; // set when the surface changes (e.g. resize)
} WindowData;

// Surface capabilities/formats/present-modes queried during swapchain creation.
typedef struct SwapchainSupportDetails
{
    VkSurfaceCapabilitiesKHR capabilities;
    VkSurfaceFormatKHR *formats;
    Uint32 formatsLength;
    VkPresentModeKHR *presentModes;
    Uint32 presentModesLength;
} SwapchainSupportDetails;

// A pending present recorded on a command buffer: which window and which image.
typedef struct VulkanPresentData
{
    WindowData *windowData;
    Uint32 swapchainImageIndex;
} VulkanPresentData;
// A uniform buffer suballocated from a pooled VulkanBuffer.
typedef struct VulkanUniformBuffer
{
    VulkanBufferHandle *bufferHandle;
    Uint32 drawOffset;  // offset bound for the current draw/dispatch
    Uint32 writeOffset; // next free byte for incoming uniform data
} VulkanUniformBuffer;

// Describes one binding slot within a descriptor set layout.
typedef struct VulkanDescriptorInfo
{
    VkDescriptorType descriptorType;
    VkShaderStageFlagBits stageFlag;
} VulkanDescriptorInfo;

// Pool of descriptor sets sharing one layout; sets are recycled rather
// than freed back to Vulkan.
typedef struct DescriptorSetPool
{
    SDL_Mutex *lock;
    VkDescriptorSetLayout descriptorSetLayout;
    VulkanDescriptorInfo *descriptorInfos;
    Uint32 descriptorInfoCount;
    // This is actually a descriptor set and descriptor pool simultaneously
    VkDescriptorPool *descriptorPools;
    Uint32 descriptorPoolCount;
    Uint32 nextPoolSize;
    // We just manage a pool ourselves instead of freeing the sets
    VkDescriptorSet *inactiveDescriptorSets;
    Uint32 inactiveDescriptorSetCount;
    Uint32 inactiveDescriptorSetCapacity;
} DescriptorSetPool;
// Pipeline layout plus per-stage resource counts for a graphics pipeline.
typedef struct VulkanGraphicsPipelineResourceLayout
{
    VkPipelineLayout pipelineLayout;
    /*
     * Descriptor set layout is as follows:
     * 0: vertex resources
     * 1: vertex uniform buffers
     * 2: fragment resources
     * 3: fragment uniform buffers
     */
    DescriptorSetPool descriptorSetPools[4];
    Uint32 vertexSamplerCount;
    Uint32 vertexStorageBufferCount;
    Uint32 vertexStorageTextureCount;
    Uint32 vertexUniformBufferCount;
    Uint32 fragmentSamplerCount;
    Uint32 fragmentStorageBufferCount;
    Uint32 fragmentStorageTextureCount;
    Uint32 fragmentUniformBufferCount;
} VulkanGraphicsPipelineResourceLayout;

// A compiled graphics pipeline; keeps its shaders alive via pointers.
typedef struct VulkanGraphicsPipeline
{
    VkPipeline pipeline;
    SDL_GPUPrimitiveType primitiveType;
    VulkanGraphicsPipelineResourceLayout resourceLayout;
    VulkanShader *vertexShader;
    VulkanShader *fragmentShader;
    SDL_AtomicInt referenceCount;
} VulkanGraphicsPipeline;
// Pipeline layout plus resource counts for a compute pipeline.
typedef struct VulkanComputePipelineResourceLayout
{
    VkPipelineLayout pipelineLayout;
    /*
     * Descriptor set layout is as follows:
     * 0: read-only textures, then read-only buffers
     * 1: write-only textures, then write-only buffers
     * 2: uniform buffers
     */
    DescriptorSetPool descriptorSetPools[3];
    Uint32 readOnlyStorageTextureCount;
    Uint32 readOnlyStorageBufferCount;
    Uint32 writeOnlyStorageTextureCount;
    Uint32 writeOnlyStorageBufferCount;
    Uint32 uniformBufferCount;
} VulkanComputePipelineResourceLayout;

// A compiled compute pipeline; owns its shader module.
typedef struct VulkanComputePipeline
{
    VkShaderModule shaderModule;
    VkPipeline pipeline;
    VulkanComputePipelineResourceLayout resourceLayout;
    SDL_AtomicInt referenceCount;
} VulkanComputePipeline;
// Attachment description used as part of the render pass cache key.
typedef struct RenderPassColorTargetDescription
{
    VkFormat format;
    SDL_GPULoadOp loadOp;
    SDL_GPUStoreOp storeOp;
} RenderPassColorTargetDescription;

typedef struct RenderPassDepthStencilTargetDescription
{
    VkFormat format;
    SDL_GPULoadOp loadOp;
    SDL_GPUStoreOp storeOp;
    SDL_GPULoadOp stencilLoadOp;
    SDL_GPUStoreOp stencilStoreOp;
} RenderPassDepthStencilTargetDescription;

// Key: one command pool per thread.
typedef struct CommandPoolHashTableKey
{
    SDL_ThreadID threadID;
} CommandPoolHashTableKey;

// Key for the render pass cache: full attachment configuration.
typedef struct RenderPassHashTableKey
{
    RenderPassColorTargetDescription colorTargetDescriptions[MAX_COLOR_TARGET_BINDINGS];
    Uint32 colorAttachmentCount;
    RenderPassDepthStencilTargetDescription depthStencilTargetDescription;
    VkSampleCountFlagBits colorAttachmentSampleCount;
} RenderPassHashTableKey;

typedef struct VulkanRenderPassHashTableValue
{
    VkRenderPass handle;
} VulkanRenderPassHashTableValue;

// Key for the framebuffer cache: the exact set of attachment views and size.
typedef struct FramebufferHashTableKey
{
    VkImageView colorAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
    VkImageView colorMultiSampleAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
    Uint32 colorAttachmentCount;
    VkImageView depthStencilAttachmentView;
    Uint32 width;
    Uint32 height;
} FramebufferHashTableKey;
// Command structures

// A descriptor set together with the pool it must be returned to.
typedef struct DescriptorSetData
{
    DescriptorSetPool *descriptorSetPool;
    VkDescriptorSet descriptorSet;
} DescriptorSetData;

// Recycling pool of fence handles, guarded by its own lock.
typedef struct VulkanFencePool
{
    SDL_Mutex *lock;
    VulkanFenceHandle **availableFences;
    Uint32 availableFenceCount;
    Uint32 availableFenceCapacity;
} VulkanFencePool;
typedef struct VulkanCommandPool VulkanCommandPool;
typedef struct VulkanRenderer VulkanRenderer;

// All per-command-buffer state: recorded presents, sync objects, bound
// pipelines/resources, dirty flags for descriptor sets, and every resource
// referenced during recording (so references can be released on completion).
typedef struct VulkanCommandBuffer
{
    CommandBufferCommonHeader common;
    VulkanRenderer *renderer;
    VkCommandBuffer commandBuffer;
    VulkanCommandPool *commandPool;
    // Presents queued on this command buffer
    VulkanPresentData *presentDatas;
    Uint32 presentDataCount;
    Uint32 presentDataCapacity;
    // Semaphores to wait on / signal at submit time
    VkSemaphore *waitSemaphores;
    Uint32 waitSemaphoreCount;
    Uint32 waitSemaphoreCapacity;
    VkSemaphore *signalSemaphores;
    Uint32 signalSemaphoreCount;
    Uint32 signalSemaphoreCapacity;
    VulkanComputePipeline *currentComputePipeline;
    VulkanGraphicsPipeline *currentGraphicsPipeline;
    // Keep track of resources transitioned away from their default state to barrier them on pass end
    VulkanTextureSubresource *colorAttachmentSubresources[MAX_COLOR_TARGET_BINDINGS];
    Uint32 colorAttachmentSubresourceCount;
    VulkanTextureSubresource *depthStencilAttachmentSubresource; // may be NULL
    // Viewport/scissor state
    VkViewport currentViewport;
    VkRect2D currentScissor;
    // Resource bind state
    // Dirty flags: set when a binding changes so descriptor sets are
    // (re)acquired/updated lazily before the next draw/dispatch.
    bool needNewVertexResourceDescriptorSet;
    bool needNewVertexUniformDescriptorSet;
    bool needNewVertexUniformOffsets;
    bool needNewFragmentResourceDescriptorSet;
    bool needNewFragmentUniformDescriptorSet;
    bool needNewFragmentUniformOffsets;
    bool needNewComputeReadOnlyDescriptorSet;
    bool needNewComputeWriteOnlyDescriptorSet;
    bool needNewComputeUniformDescriptorSet;
    bool needNewComputeUniformOffsets;
    VkDescriptorSet vertexResourceDescriptorSet;
    VkDescriptorSet vertexUniformDescriptorSet;
    VkDescriptorSet fragmentResourceDescriptorSet;
    VkDescriptorSet fragmentUniformDescriptorSet;
    VkDescriptorSet computeReadOnlyDescriptorSet;
    VkDescriptorSet computeWriteOnlyDescriptorSet;
    VkDescriptorSet computeUniformDescriptorSet;
    DescriptorSetData *boundDescriptorSetDatas;
    Uint32 boundDescriptorSetDataCount;
    Uint32 boundDescriptorSetDataCapacity;
    // Currently bound shader resources, per stage
    VulkanTexture *vertexSamplerTextures[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanSampler *vertexSamplers[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanTexture *vertexStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE];
    VulkanBuffer *vertexStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE];
    VulkanTexture *fragmentSamplerTextures[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanSampler *fragmentSamplers[MAX_TEXTURE_SAMPLERS_PER_STAGE];
    VulkanTexture *fragmentStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE];
    VulkanBuffer *fragmentStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE];
    VulkanTextureSubresource *writeOnlyComputeStorageTextureSubresources[MAX_COMPUTE_WRITE_TEXTURES];
    Uint32 writeOnlyComputeStorageTextureSubresourceCount;
    VulkanBuffer *writeOnlyComputeStorageBuffers[MAX_COMPUTE_WRITE_BUFFERS];
    VulkanTexture *readOnlyComputeStorageTextures[MAX_STORAGE_TEXTURES_PER_STAGE];
    VulkanBuffer *readOnlyComputeStorageBuffers[MAX_STORAGE_BUFFERS_PER_STAGE];
    // Uniform buffers
    VulkanUniformBuffer *vertexUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    VulkanUniformBuffer *fragmentUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    VulkanUniformBuffer *computeUniformBuffers[MAX_UNIFORM_BUFFERS_PER_STAGE];
    // Track used resources
    VulkanBuffer **usedBuffers;
    Uint32 usedBufferCount;
    Uint32 usedBufferCapacity;
    VulkanTexture **usedTextures;
    Uint32 usedTextureCount;
    Uint32 usedTextureCapacity;
    VulkanSampler **usedSamplers;
    Uint32 usedSamplerCount;
    Uint32 usedSamplerCapacity;
    VulkanGraphicsPipeline **usedGraphicsPipelines;
    Uint32 usedGraphicsPipelineCount;
    Uint32 usedGraphicsPipelineCapacity;
    VulkanComputePipeline **usedComputePipelines;
    Uint32 usedComputePipelineCount;
    Uint32 usedComputePipelineCapacity;
    VulkanFramebuffer **usedFramebuffers;
    Uint32 usedFramebufferCount;
    Uint32 usedFramebufferCapacity;
    VulkanUniformBuffer **usedUniformBuffers;
    Uint32 usedUniformBufferCount;
    Uint32 usedUniformBufferCapacity;
    VulkanFenceHandle *inFlightFence;
    Uint8 autoReleaseFence;
    Uint8 isDefrag; // Whether this CB was created for defragging
} VulkanCommandBuffer;
// Per-thread command pool with a cache of reusable command buffers.
struct VulkanCommandPool
{
    SDL_ThreadID threadID; // thread this pool belongs to (pools are per-thread)
    VkCommandPool commandPool;
    VulkanCommandBuffer **inactiveCommandBuffers; // recycled, ready for reuse
    Uint32 inactiveCommandBufferCapacity;
    Uint32 inactiveCommandBufferCount;
};
// Context

// The top-level renderer object: device handles, feature flags, allocator,
// caches, deferred-destruction queues, locks, and all loaded Vulkan entry
// points (declared via the macro include at the bottom).
struct VulkanRenderer
{
    VkInstance instance;
    VkPhysicalDevice physicalDevice;
    VkPhysicalDeviceProperties2KHR physicalDeviceProperties;
    VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties;
    VkDevice logicalDevice;
    // One-shot warning latches
    Uint8 integratedMemoryNotification;
    Uint8 outOfDeviceLocalMemoryWarning;
    Uint8 outofBARMemoryWarning;
    Uint8 fillModeOnlyWarning;
    bool debugMode;
    bool preferLowPower;
    // Supported extensions / optional features
    VulkanExtensions supports;
    bool supportsDebugUtils;
    bool supportsColorspace;
    bool supportsFillModeNonSolid;
    bool supportsMultiDrawIndirect;
    VulkanMemoryAllocator *memoryAllocator;
    VkPhysicalDeviceMemoryProperties memoryProperties;
    WindowData **claimedWindows;
    Uint32 claimedWindowCount;
    Uint32 claimedWindowCapacity;
    Uint32 queueFamilyIndex;
    VkQueue unifiedQueue; // single queue used for graphics/compute/transfer
    VulkanCommandBuffer **submittedCommandBuffers;
    Uint32 submittedCommandBufferCount;
    Uint32 submittedCommandBufferCapacity;
    VulkanFencePool fencePool;
    // Caches
    SDL_HashTable *commandPoolHashTable;
    SDL_HashTable *renderPassHashTable;
    SDL_HashTable *framebufferHashTable;
    VulkanUniformBuffer **uniformBufferPool;
    Uint32 uniformBufferPoolCount;
    Uint32 uniformBufferPoolCapacity;
    Uint32 minUBOAlignment;
    // Some drivers don't support D16 for some reason. Fun!
    VkFormat D16Format;
    VkFormat D16S8Format;
    // Deferred resource destruction
    VulkanTexture **texturesToDestroy;
    Uint32 texturesToDestroyCount;
    Uint32 texturesToDestroyCapacity;
    VulkanBuffer **buffersToDestroy;
    Uint32 buffersToDestroyCount;
    Uint32 buffersToDestroyCapacity;
    VulkanSampler **samplersToDestroy;
    Uint32 samplersToDestroyCount;
    Uint32 samplersToDestroyCapacity;
    VulkanGraphicsPipeline **graphicsPipelinesToDestroy;
    Uint32 graphicsPipelinesToDestroyCount;
    Uint32 graphicsPipelinesToDestroyCapacity;
    VulkanComputePipeline **computePipelinesToDestroy;
    Uint32 computePipelinesToDestroyCount;
    Uint32 computePipelinesToDestroyCapacity;
    VulkanShader **shadersToDestroy;
    Uint32 shadersToDestroyCount;
    Uint32 shadersToDestroyCapacity;
    VulkanFramebuffer **framebuffersToDestroy;
    Uint32 framebuffersToDestroyCount;
    Uint32 framebuffersToDestroyCapacity;
    // Locks, each guarding its named subsystem
    SDL_Mutex *allocatorLock;
    SDL_Mutex *disposeLock;
    SDL_Mutex *submitLock;
    SDL_Mutex *acquireCommandBufferLock;
    SDL_Mutex *acquireUniformBufferLock;
    SDL_Mutex *renderPassFetchLock;
    SDL_Mutex *framebufferFetchLock;
    // Defrag state
    Uint8 defragInProgress;
    VulkanMemoryAllocation **allocationsToDefrag;
    Uint32 allocationsToDefragCount;
    Uint32 allocationsToDefragCapacity;

// Function pointers for every loaded Vulkan entry point
#define VULKAN_INSTANCE_FUNCTION(func) \
    PFN_##func func;
#define VULKAN_DEVICE_FUNCTION(func) \
    PFN_##func func;
#include "SDL_gpu_vulkan_vkfuncs.h"
};
// Forward declarations
static Uint8 VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer);
static void VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
static void VULKAN_ReleaseWindow(SDL_GPURenderer *driverData, SDL_Window *window);
static void VULKAN_Wait(SDL_GPURenderer *driverData);
static void VULKAN_WaitForFences(SDL_GPURenderer *driverData, bool waitAll, SDL_GPUFence **pFences, Uint32 fenceCount);
static void VULKAN_Submit(SDL_GPUCommandBuffer *commandBuffer);
// See the TextureCommonHeader FIXME: this long parameter list should
// eventually collapse into a struct.
static VulkanTexture *VULKAN_INTERNAL_CreateTexture(
    VulkanRenderer *renderer,
    Uint32 width,
    Uint32 height,
    Uint32 depth,
    SDL_GPUTextureType type,
    Uint32 layerCount,
    Uint32 levelCount,
    VkSampleCountFlagBits sampleCount,
    VkFormat format,
    VkComponentMapping swizzle,
    VkImageAspectFlags aspectMask,
    SDL_GPUTextureUsageFlags textureUsageFlags,
    bool isMSAAColorTarget);
  985. // Error Handling
  986. static inline const char *VkErrorMessages(VkResult code)
  987. {
  988. #define ERR_TO_STR(e) \
  989. case e: \
  990. return #e;
  991. switch (code) {
  992. ERR_TO_STR(VK_ERROR_OUT_OF_HOST_MEMORY)
  993. ERR_TO_STR(VK_ERROR_OUT_OF_DEVICE_MEMORY)
  994. ERR_TO_STR(VK_ERROR_FRAGMENTED_POOL)
  995. ERR_TO_STR(VK_ERROR_OUT_OF_POOL_MEMORY)
  996. ERR_TO_STR(VK_ERROR_INITIALIZATION_FAILED)
  997. ERR_TO_STR(VK_ERROR_LAYER_NOT_PRESENT)
  998. ERR_TO_STR(VK_ERROR_EXTENSION_NOT_PRESENT)
  999. ERR_TO_STR(VK_ERROR_FEATURE_NOT_PRESENT)
  1000. ERR_TO_STR(VK_ERROR_TOO_MANY_OBJECTS)
  1001. ERR_TO_STR(VK_ERROR_DEVICE_LOST)
  1002. ERR_TO_STR(VK_ERROR_INCOMPATIBLE_DRIVER)
  1003. ERR_TO_STR(VK_ERROR_OUT_OF_DATE_KHR)
  1004. ERR_TO_STR(VK_ERROR_SURFACE_LOST_KHR)
  1005. ERR_TO_STR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT)
  1006. ERR_TO_STR(VK_SUBOPTIMAL_KHR)
  1007. default:
  1008. return "Unhandled VkResult!";
  1009. }
  1010. #undef ERR_TO_STR
  1011. }
  1012. static inline void LogVulkanResultAsError(
  1013. const char *vulkanFunctionName,
  1014. VkResult result)
  1015. {
  1016. if (result != VK_SUCCESS) {
  1017. SDL_LogError(
  1018. SDL_LOG_CATEGORY_GPU,
  1019. "%s: %s",
  1020. vulkanFunctionName,
  1021. VkErrorMessages(result));
  1022. }
  1023. }
// On failure, logs the failing call's name and VkResult, then returns `ret`
// from the *enclosing* function.
// NOTE(review): expands to a bare `if` (no do/while(0) wrapper) because it
// contains a `return`; beware using it as the body of an outer if/else.
#define VULKAN_ERROR_CHECK(res, fn, ret) \
    if (res != VK_SUCCESS) { \
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "%s %s", #fn, VkErrorMessages(res)); \
        return ret; \
    }
  1029. // Utility
  1030. static inline bool VULKAN_INTERNAL_IsVulkanDepthFormat(VkFormat format)
  1031. {
  1032. // FIXME: Can we refactor and use the regular IsDepthFormat for this?
  1033. return (
  1034. format == SDLToVK_SurfaceFormat[SDL_GPU_TEXTUREFORMAT_D16_UNORM] ||
  1035. format == SDLToVK_SurfaceFormat[SDL_GPU_TEXTUREFORMAT_D24_UNORM] ||
  1036. format == SDLToVK_SurfaceFormat[SDL_GPU_TEXTUREFORMAT_D24_UNORM_S8_UINT] ||
  1037. format == SDLToVK_SurfaceFormat[SDL_GPU_TEXTUREFORMAT_D32_FLOAT] ||
  1038. format == SDLToVK_SurfaceFormat[SDL_GPU_TEXTUREFORMAT_D32_FLOAT_S8_UINT]);
  1039. }
  1040. static inline VkSampleCountFlagBits VULKAN_INTERNAL_GetMaxMultiSampleCount(
  1041. VulkanRenderer *renderer,
  1042. VkSampleCountFlagBits multiSampleCount)
  1043. {
  1044. VkSampleCountFlags flags = renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts;
  1045. VkSampleCountFlagBits maxSupported = VK_SAMPLE_COUNT_1_BIT;
  1046. if (flags & VK_SAMPLE_COUNT_8_BIT) {
  1047. maxSupported = VK_SAMPLE_COUNT_8_BIT;
  1048. } else if (flags & VK_SAMPLE_COUNT_4_BIT) {
  1049. maxSupported = VK_SAMPLE_COUNT_4_BIT;
  1050. } else if (flags & VK_SAMPLE_COUNT_2_BIT) {
  1051. maxSupported = VK_SAMPLE_COUNT_2_BIT;
  1052. }
  1053. return SDL_min(multiSampleCount, maxSupported);
  1054. }
  1055. static inline VkPolygonMode SDLToVK_PolygonMode(
  1056. VulkanRenderer *renderer,
  1057. SDL_GPUFillMode mode)
  1058. {
  1059. if (mode == SDL_GPU_FILLMODE_FILL) {
  1060. return VK_POLYGON_MODE_FILL; // always available!
  1061. }
  1062. if (renderer->supportsFillModeNonSolid && mode == SDL_GPU_FILLMODE_LINE) {
  1063. return VK_POLYGON_MODE_LINE;
  1064. }
  1065. if (!renderer->fillModeOnlyWarning) {
  1066. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Unsupported fill mode requested, using FILL!");
  1067. renderer->fillModeOnlyWarning = 1;
  1068. }
  1069. return VK_POLYGON_MODE_FILL;
  1070. }
  1071. // Memory Management
  1072. // Vulkan: Memory Allocation
  1073. static inline VkDeviceSize VULKAN_INTERNAL_NextHighestAlignment(
  1074. VkDeviceSize n,
  1075. VkDeviceSize align)
  1076. {
  1077. return align * ((n + align - 1) / align);
  1078. }
  1079. static inline Uint32 VULKAN_INTERNAL_NextHighestAlignment32(
  1080. Uint32 n,
  1081. Uint32 align)
  1082. {
  1083. return align * ((n + align - 1) / align);
  1084. }
// Marks `allocation` as no longer usable for new sub-allocations and removes
// every one of its free regions from the sub-allocator's size-sorted list
// (used when the allocation has been queued for defragmentation).
// NOTE(review): assumes the caller holds the allocator lock — confirm at call sites.
static void VULKAN_INTERNAL_MakeMemoryUnavailable(
    VulkanRenderer *renderer,
    VulkanMemoryAllocation *allocation)
{
    Uint32 i, j;
    VulkanMemoryFreeRegion *freeRegion;

    allocation->availableForAllocation = 0;

    // Every free region of this allocation currently appears once in the
    // sorted list; shift the tail left over each one and fix up sortedIndex.
    for (i = 0; i < allocation->freeRegionCount; i += 1) {
        freeRegion = allocation->freeRegions[i];
        // close the gap in the sorted list
        if (allocation->allocator->sortedFreeRegionCount > 1) {
            for (j = freeRegion->sortedIndex; j < allocation->allocator->sortedFreeRegionCount - 1; j += 1) {
                allocation->allocator->sortedFreeRegions[j] =
                    allocation->allocator->sortedFreeRegions[j + 1];
                allocation->allocator->sortedFreeRegions[j]->sortedIndex = j;
            }
        }
        allocation->allocator->sortedFreeRegionCount -= 1;
    }
}
  1105. static void VULKAN_INTERNAL_MarkAllocationsForDefrag(
  1106. VulkanRenderer *renderer)
  1107. {
  1108. Uint32 memoryType, allocationIndex;
  1109. VulkanMemorySubAllocator *currentAllocator;
  1110. for (memoryType = 0; memoryType < VK_MAX_MEMORY_TYPES; memoryType += 1) {
  1111. currentAllocator = &renderer->memoryAllocator->subAllocators[memoryType];
  1112. for (allocationIndex = 0; allocationIndex < currentAllocator->allocationCount; allocationIndex += 1) {
  1113. if (currentAllocator->allocations[allocationIndex]->availableForAllocation == 1) {
  1114. if (currentAllocator->allocations[allocationIndex]->freeRegionCount > 1) {
  1115. EXPAND_ARRAY_IF_NEEDED(
  1116. renderer->allocationsToDefrag,
  1117. VulkanMemoryAllocation *,
  1118. renderer->allocationsToDefragCount + 1,
  1119. renderer->allocationsToDefragCapacity,
  1120. renderer->allocationsToDefragCapacity * 2);
  1121. renderer->allocationsToDefrag[renderer->allocationsToDefragCount] =
  1122. currentAllocator->allocations[allocationIndex];
  1123. renderer->allocationsToDefragCount += 1;
  1124. VULKAN_INTERNAL_MakeMemoryUnavailable(
  1125. renderer,
  1126. currentAllocator->allocations[allocationIndex]);
  1127. }
  1128. }
  1129. }
  1130. }
  1131. }
// Removes `freeRegion` from both its allocation's free-region array and (if
// the allocation is still available) the allocator's size-sorted list, then
// frees it. Takes the allocator lock for the whole operation.
static void VULKAN_INTERNAL_RemoveMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryFreeRegion *freeRegion)
{
    Uint32 i;

    SDL_LockMutex(renderer->allocatorLock);

    // Unavailable allocations were already purged from the sorted list by
    // VULKAN_INTERNAL_MakeMemoryUnavailable, so only touch it when available.
    if (freeRegion->allocation->availableForAllocation) {
        // close the gap in the sorted list
        if (freeRegion->allocation->allocator->sortedFreeRegionCount > 1) {
            for (i = freeRegion->sortedIndex; i < freeRegion->allocation->allocator->sortedFreeRegionCount - 1; i += 1) {
                freeRegion->allocation->allocator->sortedFreeRegions[i] =
                    freeRegion->allocation->allocator->sortedFreeRegions[i + 1];
                freeRegion->allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
            }
        }
        freeRegion->allocation->allocator->sortedFreeRegionCount -= 1;
    }

    // close the gap in the buffer list
    // (swap-remove: move the last region into this one's slot)
    if (freeRegion->allocation->freeRegionCount > 1 && freeRegion->allocationIndex != freeRegion->allocation->freeRegionCount - 1) {
        freeRegion->allocation->freeRegions[freeRegion->allocationIndex] =
            freeRegion->allocation->freeRegions[freeRegion->allocation->freeRegionCount - 1];
        freeRegion->allocation->freeRegions[freeRegion->allocationIndex]->allocationIndex =
            freeRegion->allocationIndex;
    }

    freeRegion->allocation->freeRegionCount -= 1;
    freeRegion->allocation->freeSpace -= freeRegion->size;

    SDL_free(freeRegion);

    SDL_UnlockMutex(renderer->allocatorLock);
}
// Registers [offset, offset + size) of `allocation` as free. If the span is
// adjacent to an existing free region, the two are merged by removing the
// neighbor and recursing with the combined span. Otherwise a new region is
// appended to the allocation's free list and, if the allocation is still
// available, inserted into the allocator's descending size-sorted list.
// NOTE(review): recursion re-enters SDL_LockMutex on allocatorLock; this
// relies on SDL mutexes being recursive (they are, per SDL documentation).
static void VULKAN_INTERNAL_NewMemoryFreeRegion(
    VulkanRenderer *renderer,
    VulkanMemoryAllocation *allocation,
    VkDeviceSize offset,
    VkDeviceSize size)
{
    VulkanMemoryFreeRegion *newFreeRegion;
    VkDeviceSize newOffset, newSize;
    Sint32 insertionIndex = 0;

    SDL_LockMutex(renderer->allocatorLock);

    // look for an adjacent region to merge
    for (Sint32 i = allocation->freeRegionCount - 1; i >= 0; i -= 1) {
        // check left side
        if (allocation->freeRegions[i]->offset + allocation->freeRegions[i]->size == offset) {
            newOffset = allocation->freeRegions[i]->offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }

        // check right side
        if (allocation->freeRegions[i]->offset == offset + size) {
            newOffset = offset;
            newSize = allocation->freeRegions[i]->size + size;

            VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
            VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);

            SDL_UnlockMutex(renderer->allocatorLock);
            return;
        }
    }

    // region is not contiguous with another free region, make a new one
    allocation->freeRegionCount += 1;
    if (allocation->freeRegionCount > allocation->freeRegionCapacity) {
        allocation->freeRegionCapacity *= 2;
        allocation->freeRegions = SDL_realloc(
            allocation->freeRegions,
            sizeof(VulkanMemoryFreeRegion *) * allocation->freeRegionCapacity);
    }

    newFreeRegion = SDL_malloc(sizeof(VulkanMemoryFreeRegion));
    newFreeRegion->offset = offset;
    newFreeRegion->size = size;
    newFreeRegion->allocation = allocation;

    allocation->freeSpace += size;

    allocation->freeRegions[allocation->freeRegionCount - 1] = newFreeRegion;
    newFreeRegion->allocationIndex = allocation->freeRegionCount - 1;

    if (allocation->availableForAllocation) {
        // Find the insertion point: the sorted list is kept in descending
        // size order, so stop at the first region smaller than `size`.
        for (Uint32 i = 0; i < allocation->allocator->sortedFreeRegionCount; i += 1) {
            if (allocation->allocator->sortedFreeRegions[i]->size < size) {
                // this is where the new region should go
                break;
            }

            insertionIndex += 1;
        }

        if (allocation->allocator->sortedFreeRegionCount + 1 > allocation->allocator->sortedFreeRegionCapacity) {
            allocation->allocator->sortedFreeRegionCapacity *= 2;
            allocation->allocator->sortedFreeRegions = SDL_realloc(
                allocation->allocator->sortedFreeRegions,
                sizeof(VulkanMemoryFreeRegion *) * allocation->allocator->sortedFreeRegionCapacity);
        }

        // perform insertion sort
        // (shift everything at and after insertionIndex one slot right)
        if (allocation->allocator->sortedFreeRegionCount > 0 && insertionIndex != allocation->allocator->sortedFreeRegionCount) {
            for (Sint32 i = allocation->allocator->sortedFreeRegionCount; i > insertionIndex && i > 0; i -= 1) {
                allocation->allocator->sortedFreeRegions[i] = allocation->allocator->sortedFreeRegions[i - 1];
                allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
            }
        }

        allocation->allocator->sortedFreeRegionCount += 1;
        allocation->allocator->sortedFreeRegions[insertionIndex] = newFreeRegion;
        newFreeRegion->sortedIndex = insertionIndex;
    }

    SDL_UnlockMutex(renderer->allocatorLock);
}
  1234. static VulkanMemoryUsedRegion *VULKAN_INTERNAL_NewMemoryUsedRegion(
  1235. VulkanRenderer *renderer,
  1236. VulkanMemoryAllocation *allocation,
  1237. VkDeviceSize offset,
  1238. VkDeviceSize size,
  1239. VkDeviceSize resourceOffset,
  1240. VkDeviceSize resourceSize,
  1241. VkDeviceSize alignment)
  1242. {
  1243. VulkanMemoryUsedRegion *memoryUsedRegion;
  1244. SDL_LockMutex(renderer->allocatorLock);
  1245. if (allocation->usedRegionCount == allocation->usedRegionCapacity) {
  1246. allocation->usedRegionCapacity *= 2;
  1247. allocation->usedRegions = SDL_realloc(
  1248. allocation->usedRegions,
  1249. allocation->usedRegionCapacity * sizeof(VulkanMemoryUsedRegion *));
  1250. }
  1251. memoryUsedRegion = SDL_malloc(sizeof(VulkanMemoryUsedRegion));
  1252. memoryUsedRegion->allocation = allocation;
  1253. memoryUsedRegion->offset = offset;
  1254. memoryUsedRegion->size = size;
  1255. memoryUsedRegion->resourceOffset = resourceOffset;
  1256. memoryUsedRegion->resourceSize = resourceSize;
  1257. memoryUsedRegion->alignment = alignment;
  1258. allocation->usedSpace += size;
  1259. allocation->usedRegions[allocation->usedRegionCount] = memoryUsedRegion;
  1260. allocation->usedRegionCount += 1;
  1261. SDL_UnlockMutex(renderer->allocatorLock);
  1262. return memoryUsedRegion;
  1263. }
  1264. static void VULKAN_INTERNAL_RemoveMemoryUsedRegion(
  1265. VulkanRenderer *renderer,
  1266. VulkanMemoryUsedRegion *usedRegion)
  1267. {
  1268. Uint32 i;
  1269. SDL_LockMutex(renderer->allocatorLock);
  1270. for (i = 0; i < usedRegion->allocation->usedRegionCount; i += 1) {
  1271. if (usedRegion->allocation->usedRegions[i] == usedRegion) {
  1272. // plug the hole
  1273. if (i != usedRegion->allocation->usedRegionCount - 1) {
  1274. usedRegion->allocation->usedRegions[i] = usedRegion->allocation->usedRegions[usedRegion->allocation->usedRegionCount - 1];
  1275. }
  1276. break;
  1277. }
  1278. }
  1279. usedRegion->allocation->usedSpace -= usedRegion->size;
  1280. usedRegion->allocation->usedRegionCount -= 1;
  1281. VULKAN_INTERNAL_NewMemoryFreeRegion(
  1282. renderer,
  1283. usedRegion->allocation,
  1284. usedRegion->offset,
  1285. usedRegion->size);
  1286. SDL_free(usedRegion);
  1287. SDL_UnlockMutex(renderer->allocatorLock);
  1288. }
  1289. static bool VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1290. Uint32 memoryTypeIndex,
  1291. Uint32 *memoryTypeIndexArray,
  1292. Uint32 count)
  1293. {
  1294. Uint32 i = 0;
  1295. for (i = 0; i < count; i += 1) {
  1296. if (memoryTypeIndexArray[i] == memoryTypeIndex) {
  1297. return false;
  1298. }
  1299. }
  1300. return true;
  1301. }
  1302. /* Returns an array of memory type indices in order of preference.
  1303. * Memory types are requested with the following three guidelines:
  1304. *
  1305. * Required: Absolutely necessary
  1306. * Preferred: Nice to have, but not necessary
  1307. * Tolerable: Can be allowed if there are no other options
  1308. *
  1309. * We return memory types in this order:
  1310. * 1. Required and preferred. This is the best category.
  1311. * 2. Required only.
  1312. * 3. Required, preferred, and tolerable.
  1313. * 4. Required and tolerable. This is the worst category.
  1314. */
  1315. static Uint32 *VULKAN_INTERNAL_FindBestMemoryTypes(
  1316. VulkanRenderer *renderer,
  1317. Uint32 typeFilter,
  1318. VkMemoryPropertyFlags requiredProperties,
  1319. VkMemoryPropertyFlags preferredProperties,
  1320. VkMemoryPropertyFlags tolerableProperties,
  1321. Uint32 *pCount)
  1322. {
  1323. Uint32 i;
  1324. Uint32 index = 0;
  1325. Uint32 *result = SDL_malloc(sizeof(Uint32) * renderer->memoryProperties.memoryTypeCount);
  1326. // required + preferred + !tolerable
  1327. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1328. if ((typeFilter & (1 << i)) &&
  1329. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1330. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == preferredProperties &&
  1331. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) {
  1332. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1333. i,
  1334. result,
  1335. index)) {
  1336. result[index] = i;
  1337. index += 1;
  1338. }
  1339. }
  1340. }
  1341. // required + !preferred + !tolerable
  1342. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1343. if ((typeFilter & (1 << i)) &&
  1344. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1345. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 &&
  1346. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == 0) {
  1347. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1348. i,
  1349. result,
  1350. index)) {
  1351. result[index] = i;
  1352. index += 1;
  1353. }
  1354. }
  1355. }
  1356. // required + preferred + tolerable
  1357. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1358. if ((typeFilter & (1 << i)) &&
  1359. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1360. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == preferredProperties &&
  1361. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) {
  1362. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1363. i,
  1364. result,
  1365. index)) {
  1366. result[index] = i;
  1367. index += 1;
  1368. }
  1369. }
  1370. }
  1371. // required + !preferred + tolerable
  1372. for (i = 0; i < renderer->memoryProperties.memoryTypeCount; i += 1) {
  1373. if ((typeFilter & (1 << i)) &&
  1374. (renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
  1375. (renderer->memoryProperties.memoryTypes[i].propertyFlags & preferredProperties) == 0 &&
  1376. (renderer->memoryProperties.memoryTypes[i].propertyFlags & tolerableProperties) == tolerableProperties) {
  1377. if (VULKAN_INTERNAL_CheckMemoryTypeArrayUnique(
  1378. i,
  1379. result,
  1380. index)) {
  1381. result[index] = i;
  1382. index += 1;
  1383. }
  1384. }
  1385. }
  1386. *pCount = index;
  1387. return result;
  1388. }
  1389. static Uint32 *VULKAN_INTERNAL_FindBestBufferMemoryTypes(
  1390. VulkanRenderer *renderer,
  1391. VkBuffer buffer,
  1392. VkMemoryPropertyFlags requiredMemoryProperties,
  1393. VkMemoryPropertyFlags preferredMemoryProperties,
  1394. VkMemoryPropertyFlags tolerableMemoryProperties,
  1395. VkMemoryRequirements *pMemoryRequirements,
  1396. Uint32 *pCount)
  1397. {
  1398. renderer->vkGetBufferMemoryRequirements(
  1399. renderer->logicalDevice,
  1400. buffer,
  1401. pMemoryRequirements);
  1402. return VULKAN_INTERNAL_FindBestMemoryTypes(
  1403. renderer,
  1404. pMemoryRequirements->memoryTypeBits,
  1405. requiredMemoryProperties,
  1406. preferredMemoryProperties,
  1407. tolerableMemoryProperties,
  1408. pCount);
  1409. }
  1410. static Uint32 *VULKAN_INTERNAL_FindBestImageMemoryTypes(
  1411. VulkanRenderer *renderer,
  1412. VkImage image,
  1413. VkMemoryPropertyFlags preferredMemoryPropertyFlags,
  1414. VkMemoryRequirements *pMemoryRequirements,
  1415. Uint32 *pCount)
  1416. {
  1417. renderer->vkGetImageMemoryRequirements(
  1418. renderer->logicalDevice,
  1419. image,
  1420. pMemoryRequirements);
  1421. return VULKAN_INTERNAL_FindBestMemoryTypes(
  1422. renderer,
  1423. pMemoryRequirements->memoryTypeBits,
  1424. 0,
  1425. preferredMemoryPropertyFlags,
  1426. 0,
  1427. pCount);
  1428. }
  1429. static void VULKAN_INTERNAL_DeallocateMemory(
  1430. VulkanRenderer *renderer,
  1431. VulkanMemorySubAllocator *allocator,
  1432. Uint32 allocationIndex)
  1433. {
  1434. Uint32 i;
  1435. VulkanMemoryAllocation *allocation = allocator->allocations[allocationIndex];
  1436. SDL_LockMutex(renderer->allocatorLock);
  1437. // If this allocation was marked for defrag, cancel that
  1438. for (i = 0; i < renderer->allocationsToDefragCount; i += 1) {
  1439. if (allocation == renderer->allocationsToDefrag[i]) {
  1440. renderer->allocationsToDefrag[i] = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
  1441. renderer->allocationsToDefragCount -= 1;
  1442. break;
  1443. }
  1444. }
  1445. for (i = 0; i < allocation->freeRegionCount; i += 1) {
  1446. VULKAN_INTERNAL_RemoveMemoryFreeRegion(
  1447. renderer,
  1448. allocation->freeRegions[i]);
  1449. }
  1450. SDL_free(allocation->freeRegions);
  1451. /* no need to iterate used regions because deallocate
  1452. * only happens when there are 0 used regions
  1453. */
  1454. SDL_free(allocation->usedRegions);
  1455. renderer->vkFreeMemory(
  1456. renderer->logicalDevice,
  1457. allocation->memory,
  1458. NULL);
  1459. SDL_DestroyMutex(allocation->memoryLock);
  1460. SDL_free(allocation);
  1461. if (allocationIndex != allocator->allocationCount - 1) {
  1462. allocator->allocations[allocationIndex] = allocator->allocations[allocator->allocationCount - 1];
  1463. }
  1464. allocator->allocationCount -= 1;
  1465. SDL_UnlockMutex(renderer->allocatorLock);
  1466. }
  1467. static Uint8 VULKAN_INTERNAL_AllocateMemory(
  1468. VulkanRenderer *renderer,
  1469. VkBuffer buffer,
  1470. VkImage image,
  1471. Uint32 memoryTypeIndex,
  1472. VkDeviceSize allocationSize,
  1473. Uint8 isHostVisible,
  1474. VulkanMemoryAllocation **pMemoryAllocation)
  1475. {
  1476. VulkanMemoryAllocation *allocation;
  1477. VulkanMemorySubAllocator *allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
  1478. VkMemoryAllocateInfo allocInfo;
  1479. VkResult result;
  1480. allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
  1481. allocInfo.pNext = NULL;
  1482. allocInfo.memoryTypeIndex = memoryTypeIndex;
  1483. allocInfo.allocationSize = allocationSize;
  1484. allocation = SDL_malloc(sizeof(VulkanMemoryAllocation));
  1485. allocation->size = allocationSize;
  1486. allocation->freeSpace = 0; // added by FreeRegions
  1487. allocation->usedSpace = 0; // added by UsedRegions
  1488. allocation->memoryLock = SDL_CreateMutex();
  1489. allocator->allocationCount += 1;
  1490. allocator->allocations = SDL_realloc(
  1491. allocator->allocations,
  1492. sizeof(VulkanMemoryAllocation *) * allocator->allocationCount);
  1493. allocator->allocations[allocator->allocationCount - 1] = allocation;
  1494. allocInfo.pNext = NULL;
  1495. allocation->availableForAllocation = 1;
  1496. allocation->usedRegions = SDL_malloc(sizeof(VulkanMemoryUsedRegion *));
  1497. allocation->usedRegionCount = 0;
  1498. allocation->usedRegionCapacity = 1;
  1499. allocation->freeRegions = SDL_malloc(sizeof(VulkanMemoryFreeRegion *));
  1500. allocation->freeRegionCount = 0;
  1501. allocation->freeRegionCapacity = 1;
  1502. allocation->allocator = allocator;
  1503. result = renderer->vkAllocateMemory(
  1504. renderer->logicalDevice,
  1505. &allocInfo,
  1506. NULL,
  1507. &allocation->memory);
  1508. if (result != VK_SUCCESS) {
  1509. // Uh oh, we couldn't allocate, time to clean up
  1510. SDL_free(allocation->freeRegions);
  1511. allocator->allocationCount -= 1;
  1512. allocator->allocations = SDL_realloc(
  1513. allocator->allocations,
  1514. sizeof(VulkanMemoryAllocation *) * allocator->allocationCount);
  1515. SDL_free(allocation);
  1516. return 0;
  1517. }
  1518. // Persistent mapping for host-visible memory
  1519. if (isHostVisible) {
  1520. result = renderer->vkMapMemory(
  1521. renderer->logicalDevice,
  1522. allocation->memory,
  1523. 0,
  1524. VK_WHOLE_SIZE,
  1525. 0,
  1526. (void **)&allocation->mapPointer);
  1527. VULKAN_ERROR_CHECK(result, vkMapMemory, 0)
  1528. } else {
  1529. allocation->mapPointer = NULL;
  1530. }
  1531. VULKAN_INTERNAL_NewMemoryFreeRegion(
  1532. renderer,
  1533. allocation,
  1534. 0,
  1535. allocation->size);
  1536. *pMemoryAllocation = allocation;
  1537. return 1;
  1538. }
  1539. static Uint8 VULKAN_INTERNAL_BindBufferMemory(
  1540. VulkanRenderer *renderer,
  1541. VulkanMemoryUsedRegion *usedRegion,
  1542. VkDeviceSize alignedOffset,
  1543. VkBuffer buffer)
  1544. {
  1545. VkResult vulkanResult;
  1546. SDL_LockMutex(usedRegion->allocation->memoryLock);
  1547. vulkanResult = renderer->vkBindBufferMemory(
  1548. renderer->logicalDevice,
  1549. buffer,
  1550. usedRegion->allocation->memory,
  1551. alignedOffset);
  1552. SDL_UnlockMutex(usedRegion->allocation->memoryLock);
  1553. VULKAN_ERROR_CHECK(vulkanResult, vkBindBufferMemory, 0)
  1554. return 1;
  1555. }
  1556. static Uint8 VULKAN_INTERNAL_BindImageMemory(
  1557. VulkanRenderer *renderer,
  1558. VulkanMemoryUsedRegion *usedRegion,
  1559. VkDeviceSize alignedOffset,
  1560. VkImage image)
  1561. {
  1562. VkResult vulkanResult;
  1563. SDL_LockMutex(usedRegion->allocation->memoryLock);
  1564. vulkanResult = renderer->vkBindImageMemory(
  1565. renderer->logicalDevice,
  1566. image,
  1567. usedRegion->allocation->memory,
  1568. alignedOffset);
  1569. SDL_UnlockMutex(usedRegion->allocation->memoryLock);
  1570. VULKAN_ERROR_CHECK(vulkanResult, vkBindBufferMemory, 0)
  1571. return 1;
  1572. }
// Finds (or allocates) device memory of the given type and binds exactly one
// of `buffer`/`image` to it, writing the occupied region to
// *pMemoryUsedRegion.
// Returns 1 on success, 0 on bind failure, and 2 when a new device
// allocation of this memory type failed (caller may retry another type).
static Uint8 VULKAN_INTERNAL_BindResourceMemory(
    VulkanRenderer *renderer,
    Uint32 memoryTypeIndex,
    VkMemoryRequirements *memoryRequirements,
    VkDeviceSize resourceSize, // may be different from requirements size!
    VkBuffer buffer,           // may be VK_NULL_HANDLE
    VkImage image,             // may be VK_NULL_HANDLE
    VulkanMemoryUsedRegion **pMemoryUsedRegion)
{
    VulkanMemoryAllocation *allocation;
    VulkanMemorySubAllocator *allocator;
    VulkanMemoryFreeRegion *region;
    VulkanMemoryFreeRegion *selectedRegion;
    VulkanMemoryUsedRegion *usedRegion;

    VkDeviceSize requiredSize, allocationSize;
    VkDeviceSize alignedOffset;
    VkDeviceSize newRegionSize, newRegionOffset;

    Uint8 isHostVisible, smallAllocation, allocationResult;
    Sint32 i;

    isHostVisible =
        (renderer->memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags &
         VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

    allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
    requiredSize = memoryRequirements->size;
    // Small resources go into shared fixed-size pages; large ones get their
    // own appropriately sized allocation (see below).
    smallAllocation = requiredSize <= SMALL_ALLOCATION_THRESHOLD;

    if ((buffer == VK_NULL_HANDLE && image == VK_NULL_HANDLE) ||
        (buffer != VK_NULL_HANDLE && image != VK_NULL_HANDLE)) {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "BindResourceMemory must be given either a VulkanBuffer or a VulkanTexture");
        return 0;
    }

    SDL_LockMutex(renderer->allocatorLock);

    selectedRegion = NULL;

    // The sorted list is ordered by descending size, so scanning backwards
    // tries the smallest candidate regions first (best fit).
    for (i = allocator->sortedFreeRegionCount - 1; i >= 0; i -= 1) {
        region = allocator->sortedFreeRegions[i];

        if (smallAllocation && region->allocation->size != SMALL_ALLOCATION_SIZE) {
            // region is not in a small allocation
            continue;
        }

        if (!smallAllocation && region->allocation->size == SMALL_ALLOCATION_SIZE) {
            // allocation is not small and current region is in a small allocation
            continue;
        }

        alignedOffset = VULKAN_INTERNAL_NextHighestAlignment(
            region->offset,
            memoryRequirements->alignment);

        if (alignedOffset + requiredSize <= region->offset + region->size) {
            selectedRegion = region;
            break;
        }
    }

    if (selectedRegion != NULL) {
        region = selectedRegion;
        allocation = region->allocation;

        // The used region starts at the free region's offset and its size
        // includes the alignment padding before the resource.
        usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
            renderer,
            allocation,
            region->offset,
            requiredSize + (alignedOffset - region->offset),
            alignedOffset,
            resourceSize,
            memoryRequirements->alignment);

        usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

        newRegionSize = region->size - ((alignedOffset - region->offset) + requiredSize);
        newRegionOffset = alignedOffset + requiredSize;

        // remove and add modified region to re-sort
        VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

        // if size is 0, no need to re-insert
        if (newRegionSize != 0) {
            VULKAN_INTERNAL_NewMemoryFreeRegion(
                renderer,
                allocation,
                newRegionOffset,
                newRegionSize);
        }

        SDL_UnlockMutex(renderer->allocatorLock);

        if (buffer != VK_NULL_HANDLE) {
            if (!VULKAN_INTERNAL_BindBufferMemory(
                    renderer,
                    usedRegion,
                    alignedOffset,
                    buffer)) {
                // Bind failed; release the region we just claimed.
                VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                    renderer,
                    usedRegion);

                return 0;
            }
        } else if (image != VK_NULL_HANDLE) {
            if (!VULKAN_INTERNAL_BindImageMemory(
                    renderer,
                    usedRegion,
                    alignedOffset,
                    image)) {
                VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                    renderer,
                    usedRegion);

                return 0;
            }
        }

        *pMemoryUsedRegion = usedRegion;
        return 1;
    }

    // No suitable free regions exist, allocate a new memory region
    if (
        renderer->allocationsToDefragCount == 0 &&
        !renderer->defragInProgress) {
        // Mark currently fragmented allocations for defrag
        VULKAN_INTERNAL_MarkAllocationsForDefrag(renderer);
    }

    if (requiredSize > SMALL_ALLOCATION_THRESHOLD) {
        // allocate a page of required size aligned to LARGE_ALLOCATION_INCREMENT increments
        allocationSize =
            VULKAN_INTERNAL_NextHighestAlignment(requiredSize, LARGE_ALLOCATION_INCREMENT);
    } else {
        allocationSize = SMALL_ALLOCATION_SIZE;
    }

    allocationResult = VULKAN_INTERNAL_AllocateMemory(
        renderer,
        buffer,
        image,
        memoryTypeIndex,
        allocationSize,
        isHostVisible,
        &allocation);

    // Uh oh, we're out of memory
    if (allocationResult == 0) {
        SDL_UnlockMutex(renderer->allocatorLock);

        // Responsibility of the caller to handle being out of memory
        return 2;
    }

    // A fresh allocation has exactly one free region covering the whole
    // block, so the resource lands at offset 0 (trivially aligned).
    usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
        renderer,
        allocation,
        0,
        requiredSize,
        0,
        resourceSize,
        memoryRequirements->alignment);

    usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

    region = allocation->freeRegions[0];

    newRegionOffset = region->offset + requiredSize;
    newRegionSize = region->size - requiredSize;

    VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

    if (newRegionSize != 0) {
        VULKAN_INTERNAL_NewMemoryFreeRegion(
            renderer,
            allocation,
            newRegionOffset,
            newRegionSize);
    }

    SDL_UnlockMutex(renderer->allocatorLock);

    if (buffer != VK_NULL_HANDLE) {
        if (!VULKAN_INTERNAL_BindBufferMemory(
                renderer,
                usedRegion,
                0,
                buffer)) {
            VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                renderer,
                usedRegion);

            return 0;
        }
    } else if (image != VK_NULL_HANDLE) {
        if (!VULKAN_INTERNAL_BindImageMemory(
                renderer,
                usedRegion,
                0,
                image)) {
            VULKAN_INTERNAL_RemoveMemoryUsedRegion(
                renderer,
                usedRegion);

            return 0;
        }
    }

    *pMemoryUsedRegion = usedRegion;
    return 1;
}
  1749. static Uint8 VULKAN_INTERNAL_BindMemoryForImage(
  1750. VulkanRenderer *renderer,
  1751. VkImage image,
  1752. VulkanMemoryUsedRegion **usedRegion)
  1753. {
  1754. Uint8 bindResult = 0;
  1755. Uint32 memoryTypeCount = 0;
  1756. Uint32 *memoryTypesToTry = NULL;
  1757. Uint32 selectedMemoryTypeIndex = 0;
  1758. Uint32 i;
  1759. VkMemoryPropertyFlags preferredMemoryPropertyFlags;
  1760. VkMemoryRequirements memoryRequirements;
  1761. /* Vulkan memory types have several memory properties.
  1762. *
  1763. * Unlike buffers, images are always optimally stored device-local,
  1764. * so that is the only property we prefer here.
  1765. *
  1766. * If memory is constrained, it is fine for the texture to not
  1767. * be device-local.
  1768. */
  1769. preferredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  1770. memoryTypesToTry = VULKAN_INTERNAL_FindBestImageMemoryTypes(
  1771. renderer,
  1772. image,
  1773. preferredMemoryPropertyFlags,
  1774. &memoryRequirements,
  1775. &memoryTypeCount);
  1776. for (i = 0; i < memoryTypeCount; i += 1) {
  1777. bindResult = VULKAN_INTERNAL_BindResourceMemory(
  1778. renderer,
  1779. memoryTypesToTry[i],
  1780. &memoryRequirements,
  1781. memoryRequirements.size,
  1782. VK_NULL_HANDLE,
  1783. image,
  1784. usedRegion);
  1785. if (bindResult == 1) {
  1786. selectedMemoryTypeIndex = memoryTypesToTry[i];
  1787. break;
  1788. }
  1789. }
  1790. SDL_free(memoryTypesToTry);
  1791. // Check for warnings on success
  1792. if (bindResult == 1) {
  1793. if (!renderer->outOfDeviceLocalMemoryWarning) {
  1794. if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
  1795. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating textures on host-local memory!");
  1796. renderer->outOfDeviceLocalMemoryWarning = 1;
  1797. }
  1798. }
  1799. }
  1800. return bindResult;
  1801. }
// Binds a buffer to device memory, choosing property requirements by buffer
// type and trying the ranked memory types until one succeeds.
// Returns the last bind result (1 = success, 0/2 = failure).
static Uint8 VULKAN_INTERNAL_BindMemoryForBuffer(
    VulkanRenderer *renderer,
    VkBuffer buffer,
    VkDeviceSize size,
    VulkanBufferType type,
    VulkanMemoryUsedRegion **usedRegion)
{
    Uint8 bindResult = 0;
    Uint32 memoryTypeCount = 0;
    Uint32 *memoryTypesToTry = NULL;
    Uint32 selectedMemoryTypeIndex = 0;
    Uint32 i;
    VkMemoryPropertyFlags requiredMemoryPropertyFlags = 0;
    VkMemoryPropertyFlags preferredMemoryPropertyFlags = 0;
    VkMemoryPropertyFlags tolerableMemoryPropertyFlags = 0;
    VkMemoryRequirements memoryRequirements;

    /* Buffers need to be optimally bound to a memory type
     * based on their use case and the architecture of the system.
     *
     * It is important to understand the distinction between device and host.
     *
     * On a traditional high-performance desktop computer,
     * the "device" would be the GPU, and the "host" would be the CPU.
     * Memory being copied between these two must cross the PCI bus.
     * On these systems we have to be concerned about bandwidth limitations
     * and causing memory stalls, so we have taken a great deal of care
     * to structure this API to guide the client towards optimal usage.
     *
     * Other kinds of devices do not necessarily have this distinction.
     * On an iPhone or Nintendo Switch, all memory is accessible both to the
     * GPU and the CPU at all times. These kinds of systems are known as
     * UMA, or Unified Memory Architecture. A desktop computer using the
     * CPU's integrated graphics can also be thought of as UMA.
     *
     * Vulkan memory types have several memory properties.
     * The relevant memory properties are as follows:
     *
     * DEVICE_LOCAL:
     * This memory is on-device and most efficient for device access.
     * On UMA systems all memory is device-local.
     * If memory is not device-local, then it is host-local.
     *
     * HOST_VISIBLE:
     * This memory can be mapped for host access, meaning we can obtain
     * a pointer to directly access the memory.
     *
     * HOST_COHERENT:
     * Host-coherent memory does not require cache management operations
     * when mapped, so we always set this alongside HOST_VISIBLE
     * to avoid extra record keeping.
     *
     * HOST_CACHED:
     * Host-cached memory is faster to access than uncached memory
     * but memory of this type might not always be available.
     *
     * GPU buffers, like vertex buffers, indirect buffers, etc
     * are optimally stored in device-local memory.
     * However, if device-local memory is low, these buffers
     * can be accessed from host-local memory with a performance penalty.
     *
     * Uniform buffers must be host-visible and coherent because
     * the client uses them to quickly push small amounts of data.
     * We prefer uniform buffers to also be device-local because
     * they are accessed by shaders, but the amount of memory
     * that is both device-local and host-visible
     * is often constrained, particularly on low-end devices.
     *
     * Transfer buffers must be host-visible and coherent because
     * the client uses them to stage data to be transferred
     * to device-local memory, or to read back data transferred
     * from the device. We prefer the cache bit for performance
     * but it isn't strictly necessary. We tolerate device-local
     * memory in this situation because, as mentioned above,
     * on certain devices all memory is device-local, and even
     * though the transfer isn't strictly necessary it is still
     * useful for correctly timelining data.
     */
    if (type == VULKAN_BUFFER_TYPE_GPU) {
        preferredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
        requiredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
            VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        preferredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) {
        requiredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
            VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;

        preferredMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_HOST_CACHED_BIT;

        tolerableMemoryPropertyFlags |=
            VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer type!");
        return 0;
    }

    memoryTypesToTry = VULKAN_INTERNAL_FindBestBufferMemoryTypes(
        renderer,
        buffer,
        requiredMemoryPropertyFlags,
        preferredMemoryPropertyFlags,
        tolerableMemoryPropertyFlags,
        &memoryRequirements,
        &memoryTypeCount);

    // Try each candidate memory type in order of preference; stop at the
    // first successful bind (a result of 2 means that type is exhausted).
    for (i = 0; i < memoryTypeCount; i += 1) {
        bindResult = VULKAN_INTERNAL_BindResourceMemory(
            renderer,
            memoryTypesToTry[i],
            &memoryRequirements,
            size,
            buffer,
            VK_NULL_HANDLE,
            usedRegion);

        if (bindResult == 1) {
            selectedMemoryTypeIndex = memoryTypesToTry[i];
            break;
        }
    }

    SDL_free(memoryTypesToTry);

    // Check for warnings on success; each is emitted at most once per
    // renderer so logs are not flooded.
    if (bindResult == 1) {
        if (type == VULKAN_BUFFER_TYPE_GPU) {
            if (!renderer->outOfDeviceLocalMemoryWarning) {
                if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
                    SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of device-local memory, allocating buffers on host-local memory, expect degraded performance!");
                    renderer->outOfDeviceLocalMemoryWarning = 1;
                }
            }
        } else if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
            if (!renderer->outofBARMemoryWarning) {
                if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == 0) {
                    SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Out of BAR memory, allocating uniform buffers on host-local memory, expect degraded performance!");
                    renderer->outofBARMemoryWarning = 1;
                }
            }
        } else if (type == VULKAN_BUFFER_TYPE_TRANSFER) {
            if (!renderer->integratedMemoryNotification) {
                if ((renderer->memoryProperties.memoryTypes[selectedMemoryTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) == VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
                    SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Integrated memory detected, allocating TransferBuffers on device-local memory!");
                    renderer->integratedMemoryNotification = 1;
                }
            }
        }
    }

    return bindResult;
}
  1950. // Resource tracking
// Appends `resource` to the command buffer's `array` if it is not already
// present, growing the array by one slot when full (linear growth).
// Expects a local `commandBuffer` in the expanding function's scope and
// early-returns from that function when the resource is already tracked.
// Unlike TRACK_RESOURCE below, this does NOT take a reference on the
// resource.
#define ADD_TO_ARRAY_UNIQUE(resource, type, array, count, capacity) \
    Uint32 i;                                                       \
                                                                    \
    for (i = 0; i < commandBuffer->count; i += 1) {                 \
        if (commandBuffer->array[i] == resource) {                  \
            return;                                                 \
        }                                                           \
    }                                                               \
                                                                    \
    if (commandBuffer->count == commandBuffer->capacity) {          \
        commandBuffer->capacity += 1;                               \
        commandBuffer->array = SDL_realloc(                         \
            commandBuffer->array,                                   \
            commandBuffer->capacity * sizeof(type));                \
    }                                                               \
    commandBuffer->array[commandBuffer->count] = resource;          \
    commandBuffer->count += 1;
// Like ADD_TO_ARRAY_UNIQUE, but additionally increments the resource's
// atomic reference count when it is newly tracked (the early return on a
// duplicate means the refcount is bumped at most once per command buffer).
#define TRACK_RESOURCE(resource, type, array, count, capacity) \
    Uint32 i;                                                  \
                                                               \
    for (i = 0; i < commandBuffer->count; i += 1) {            \
        if (commandBuffer->array[i] == resource) {             \
            return;                                            \
        }                                                      \
    }                                                          \
                                                               \
    if (commandBuffer->count == commandBuffer->capacity) {     \
        commandBuffer->capacity += 1;                          \
        commandBuffer->array = SDL_realloc(                    \
            commandBuffer->array,                              \
            commandBuffer->capacity * sizeof(type));           \
    }                                                          \
    commandBuffer->array[commandBuffer->count] = resource;     \
    commandBuffer->count += 1;                                 \
    SDL_AtomicIncRef(&resource->referenceCount);
// Records the buffer in the command buffer's used-buffer list (no-op if
// already tracked) and takes a reference on it.
static void VULKAN_INTERNAL_TrackBuffer(
    VulkanCommandBuffer *commandBuffer,
    VulkanBuffer *buffer)
{
    TRACK_RESOURCE(
        buffer,
        VulkanBuffer *,
        usedBuffers,
        usedBufferCount,
        usedBufferCapacity)
}
// Records `texture` in the command buffer's used-texture list (deduplicated)
// and takes a reference on it via TRACK_RESOURCE.
static void VULKAN_INTERNAL_TrackTexture(
    VulkanCommandBuffer *commandBuffer,
    VulkanTexture *texture)
{
    TRACK_RESOURCE(
        texture,
        VulkanTexture *,
        usedTextures,
        usedTextureCount,
        usedTextureCapacity)
}
// Records `sampler` in the command buffer's used-sampler list (deduplicated)
// and takes a reference on it via TRACK_RESOURCE.
static void VULKAN_INTERNAL_TrackSampler(
    VulkanCommandBuffer *commandBuffer,
    VulkanSampler *sampler)
{
    TRACK_RESOURCE(
        sampler,
        VulkanSampler *,
        usedSamplers,
        usedSamplerCount,
        usedSamplerCapacity)
}
// Records `graphicsPipeline` in the command buffer's used-pipeline list
// (deduplicated) and takes a reference on it via TRACK_RESOURCE.
static void VULKAN_INTERNAL_TrackGraphicsPipeline(
    VulkanCommandBuffer *commandBuffer,
    VulkanGraphicsPipeline *graphicsPipeline)
{
    TRACK_RESOURCE(
        graphicsPipeline,
        VulkanGraphicsPipeline *,
        usedGraphicsPipelines,
        usedGraphicsPipelineCount,
        usedGraphicsPipelineCapacity)
}
// Records `computePipeline` in the command buffer's used-pipeline list
// (deduplicated) and takes a reference on it via TRACK_RESOURCE.
static void VULKAN_INTERNAL_TrackComputePipeline(
    VulkanCommandBuffer *commandBuffer,
    VulkanComputePipeline *computePipeline)
{
    TRACK_RESOURCE(
        computePipeline,
        VulkanComputePipeline *,
        usedComputePipelines,
        usedComputePipelineCount,
        usedComputePipelineCapacity)
}
  2041. static void VULKAN_INTERNAL_TrackFramebuffer(
  2042. VulkanRenderer *renderer,
  2043. VulkanCommandBuffer *commandBuffer,
  2044. VulkanFramebuffer *framebuffer)
  2045. {
  2046. TRACK_RESOURCE(
  2047. framebuffer,
  2048. VulkanFramebuffer *,
  2049. usedFramebuffers,
  2050. usedFramebufferCount,
  2051. usedFramebufferCapacity);
  2052. }
  2053. static void VULKAN_INTERNAL_TrackUniformBuffer(
  2054. VulkanCommandBuffer *commandBuffer,
  2055. VulkanUniformBuffer *uniformBuffer)
  2056. {
  2057. Uint32 i;
  2058. for (i = 0; i < commandBuffer->usedUniformBufferCount; i += 1) {
  2059. if (commandBuffer->usedUniformBuffers[i] == uniformBuffer) {
  2060. return;
  2061. }
  2062. }
  2063. if (commandBuffer->usedUniformBufferCount == commandBuffer->usedUniformBufferCapacity) {
  2064. commandBuffer->usedUniformBufferCapacity += 1;
  2065. commandBuffer->usedUniformBuffers = SDL_realloc(
  2066. commandBuffer->usedUniformBuffers,
  2067. commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *));
  2068. }
  2069. commandBuffer->usedUniformBuffers[commandBuffer->usedUniformBufferCount] = uniformBuffer;
  2070. commandBuffer->usedUniformBufferCount += 1;
  2071. VULKAN_INTERNAL_TrackBuffer(
  2072. commandBuffer,
  2073. uniformBuffer->bufferHandle->vulkanBuffer);
  2074. }
  2075. #undef TRACK_RESOURCE
  2076. // Memory Barriers
  2077. /*
  2078. * In Vulkan, we must manually synchronize operations that write to resources on the GPU
  2079. * so that read-after-write, write-after-read, and write-after-write hazards do not occur.
  2080. * Additionally, textures are required to be in specific layouts for specific use cases.
  2081. * Both of these tasks are accomplished with vkCmdPipelineBarrier.
  2082. *
  2083. * To insert the correct barriers, we keep track of "usage modes" for buffers and textures.
  2084. * These indicate the current usage of that resource on the command buffer.
  2085. * The transition from one usage mode to another indicates how the barrier should be constructed.
  2086. *
  2087. * Pipeline barriers cannot be inserted during a render pass, but they can be inserted
  2088. * during a compute or copy pass.
  2089. *
  2090. * This means that the "default" usage mode of any given resource should be that it should be
  2091. * ready for a graphics-read operation, because we cannot barrier during a render pass.
  2092. * In the case where a resource is only used in compute, its default usage mode can be compute-read.
  2093. * This strategy allows us to avoid expensive record keeping of command buffer/resource usage mode pairs,
  2094. * and it fully covers synchronization between all combinations of stages.
  2095. *
  2096. * In Upload and Copy functions, we transition the resource immediately before and after the copy command.
  2097. *
  2098. * When binding a resource for compute, we transition when the Bind functions are called.
  2099. * If a bind slot containing a resource is overwritten, we transition the resource in that slot back to its default.
  2100. * When EndComputePass is called we transition all bound resources back to their default state.
  2101. *
  2102. * When binding a texture as a render pass attachment, we transition the resource on BeginRenderPass
  2103. * and transition it back to its default on EndRenderPass.
  2104. *
  2105. * This strategy imposes certain limitations on resource usage flags.
  2106. * For example, a texture cannot have both the SAMPLER and GRAPHICS_STORAGE usage flags,
* because then it is impossible for the backend to infer which default usage mode the texture should use.
  2108. *
  2109. * Sync hazards can be detected by setting VK_KHRONOS_VALIDATION_VALIDATE_SYNC=1 when using validation layers.
  2110. */
  2111. static void VULKAN_INTERNAL_BufferMemoryBarrier(
  2112. VulkanRenderer *renderer,
  2113. VulkanCommandBuffer *commandBuffer,
  2114. VulkanBufferUsageMode sourceUsageMode,
  2115. VulkanBufferUsageMode destinationUsageMode,
  2116. VulkanBuffer *buffer)
  2117. {
  2118. VkPipelineStageFlags srcStages = 0;
  2119. VkPipelineStageFlags dstStages = 0;
  2120. VkBufferMemoryBarrier memoryBarrier;
  2121. memoryBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
  2122. memoryBarrier.pNext = NULL;
  2123. memoryBarrier.srcAccessMask = 0;
  2124. memoryBarrier.dstAccessMask = 0;
  2125. memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2126. memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  2127. memoryBarrier.buffer = buffer->buffer;
  2128. memoryBarrier.offset = 0;
  2129. memoryBarrier.size = buffer->size;
  2130. if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
  2131. srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2132. memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2133. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
  2134. srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2135. memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2136. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
  2137. srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2138. memoryBarrier.srcAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
  2139. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
  2140. srcStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2141. memoryBarrier.srcAccessMask = VK_ACCESS_INDEX_READ_BIT;
  2142. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
  2143. srcStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
  2144. memoryBarrier.srcAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
  2145. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
  2146. srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2147. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2148. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
  2149. srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2150. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2151. } else if (sourceUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
  2152. srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2153. memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2154. } else {
  2155. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer source barrier type!");
  2156. return;
  2157. }
  2158. if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE) {
  2159. dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2160. memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  2161. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION) {
  2162. dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
  2163. memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
  2164. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_VERTEX_READ) {
  2165. dstStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2166. memoryBarrier.dstAccessMask = VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
  2167. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDEX_READ) {
  2168. dstStages = VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
  2169. memoryBarrier.dstAccessMask = VK_ACCESS_INDEX_READ_BIT;
  2170. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_INDIRECT) {
  2171. dstStages = VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
  2172. memoryBarrier.dstAccessMask = VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
  2173. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ) {
  2174. dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  2175. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2176. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ) {
  2177. dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2178. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
  2179. } else if (destinationUsageMode == VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
  2180. dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
  2181. memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
  2182. } else {
  2183. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized buffer destination barrier type!");
  2184. return;
  2185. }
  2186. renderer->vkCmdPipelineBarrier(
  2187. commandBuffer->commandBuffer,
  2188. srcStages,
  2189. dstStages,
  2190. 0,
  2191. 0,
  2192. NULL,
  2193. 1,
  2194. &memoryBarrier,
  2195. 0,
  2196. NULL);
  2197. buffer->transitioned = true;
  2198. }
// Records a vkCmdPipelineBarrier for a single texture subresource (one
// layer/mip), translating the abstract usage-mode transition into Vulkan
// stage masks, access masks, AND an image layout transition.
// Logs an error and records nothing if either mode is unrecognized.
static void VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode sourceUsageMode,
    VulkanTextureUsageMode destinationUsageMode,
    VulkanTextureSubresource *textureSubresource)
{
    VkPipelineStageFlags srcStages = 0;
    VkPipelineStageFlags dstStages = 0;
    VkImageMemoryBarrier memoryBarrier;
    memoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    memoryBarrier.pNext = NULL;
    memoryBarrier.srcAccessMask = 0;
    memoryBarrier.dstAccessMask = 0;
    memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    memoryBarrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    memoryBarrier.image = textureSubresource->parent->image;
    // Barrier covers exactly this one layer + mip level.
    memoryBarrier.subresourceRange.aspectMask = textureSubresource->parent->aspectFlags;
    memoryBarrier.subresourceRange.baseArrayLayer = textureSubresource->layer;
    memoryBarrier.subresourceRange.layerCount = 1;
    memoryBarrier.subresourceRange.baseMipLevel = textureSubresource->level;
    memoryBarrier.subresourceRange.levelCount = 1;
    // Source mode determines the stage to wait on, the prior access to make
    // visible, and the layout the subresource is currently in.
    if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
        srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
        srcStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
        srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
        srcStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL; // storage images use GENERAL
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
        srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
        srcStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
        srcStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    } else if (sourceUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
        srcStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        memoryBarrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture source barrier type!");
        return;
    }
    // First-ever transition: the image contents are undefined, so use
    // UNDEFINED as the old layout and let the driver discard contents.
    if (!textureSubresource->transitioned) {
        memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    }
    // Destination mode determines the stage to block, the access to make
    // available, and the layout to transition into.
    if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE) {
        dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION) {
        dstStages = VK_PIPELINE_STAGE_TRANSFER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_SAMPLER) {
        dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ) {
        dstStages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ) {
        dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE) {
        dstStages = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_GENERAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT) {
        dstStages = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT) {
        dstStages = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        memoryBarrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    } else if (destinationUsageMode == VULKAN_TEXTURE_USAGE_MODE_PRESENT) {
        // Presentation needs no access mask; nothing reads the image on the
        // GPU timeline after this barrier within this command buffer.
        dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
        memoryBarrier.dstAccessMask = 0;
        memoryBarrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized texture destination barrier type!");
        return;
    }
    renderer->vkCmdPipelineBarrier(
        commandBuffer->commandBuffer,
        srcStages,
        dstStages,
        0,
        0,
        NULL,
        0,
        NULL,
        1,
        &memoryBarrier);
    // From here on the subresource has a defined layout.
    textureSubresource->transitioned = true;
}
  2315. static VulkanBufferUsageMode VULKAN_INTERNAL_DefaultBufferUsageMode(
  2316. VulkanBuffer *buffer)
  2317. {
  2318. // NOTE: order matters here!
  2319. if (buffer->usageFlags & SDL_GPU_BUFFERUSAGE_VERTEX_BIT) {
  2320. return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
  2321. } else if (buffer->usageFlags & SDL_GPU_BUFFERUSAGE_INDEX_BIT) {
  2322. return VULKAN_BUFFER_USAGE_MODE_INDEX_READ;
  2323. } else if (buffer->usageFlags & SDL_GPU_BUFFERUSAGE_INDIRECT_BIT) {
  2324. return VULKAN_BUFFER_USAGE_MODE_INDIRECT;
  2325. } else if (buffer->usageFlags & SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ_BIT) {
  2326. return VULKAN_BUFFER_USAGE_MODE_GRAPHICS_STORAGE_READ;
  2327. } else if (buffer->usageFlags & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ_BIT) {
  2328. return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ;
  2329. } else if (buffer->usageFlags & SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE_BIT) {
  2330. return VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
  2331. } else {
  2332. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Buffer has no default usage mode!");
  2333. return VULKAN_BUFFER_USAGE_MODE_VERTEX_READ;
  2334. }
  2335. }
  2336. static VulkanTextureUsageMode VULKAN_INTERNAL_DefaultTextureUsageMode(
  2337. VulkanTexture *texture)
  2338. {
  2339. // NOTE: order matters here!
  2340. // NOTE: graphics storage bits and sampler bit are mutually exclusive!
  2341. if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_SAMPLER_BIT) {
  2342. return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
  2343. } else if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ_BIT) {
  2344. return VULKAN_TEXTURE_USAGE_MODE_GRAPHICS_STORAGE_READ;
  2345. } else if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT) {
  2346. return VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT;
  2347. } else if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT) {
  2348. return VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT;
  2349. } else if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ_BIT) {
  2350. return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ;
  2351. } else if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE_BIT) {
  2352. return VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE;
  2353. } else {
  2354. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Texture has no default usage mode!");
  2355. return VULKAN_TEXTURE_USAGE_MODE_SAMPLER;
  2356. }
  2357. }
// Barriers a buffer from its default (resting) usage mode into
// `destinationUsageMode`. Pair with BufferTransitionToDefaultUsage.
static void VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanBufferUsageMode destinationUsageMode,
    VulkanBuffer *buffer)
{
    VULKAN_INTERNAL_BufferMemoryBarrier(
        renderer,
        commandBuffer,
        VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
        destinationUsageMode,
        buffer);
}
// Barriers a buffer from `sourceUsageMode` back to its default (resting)
// usage mode. Pair with BufferTransitionFromDefaultUsage.
static void VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanBufferUsageMode sourceUsageMode,
    VulkanBuffer *buffer)
{
    VULKAN_INTERNAL_BufferMemoryBarrier(
        renderer,
        commandBuffer,
        sourceUsageMode,
        VULKAN_INTERNAL_DefaultBufferUsageMode(buffer),
        buffer);
}
// Barriers one texture subresource from the parent texture's default
// (resting) usage mode into `destinationUsageMode`.
static void VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode destinationUsageMode,
    VulkanTextureSubresource *textureSubresource)
{
    VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
        renderer,
        commandBuffer,
        VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
        destinationUsageMode,
        textureSubresource);
}
// Barriers every subresource of `texture` from its default (resting) usage
// mode into `destinationUsageMode`, one barrier per subresource.
static void VULKAN_INTERNAL_TextureTransitionFromDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode destinationUsageMode,
    VulkanTexture *texture)
{
    for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
            renderer,
            commandBuffer,
            destinationUsageMode,
            &texture->subresources[i]);
    }
}
// Barriers one texture subresource from `sourceUsageMode` back to the parent
// texture's default (resting) usage mode.
static void VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode sourceUsageMode,
    VulkanTextureSubresource *textureSubresource)
{
    VULKAN_INTERNAL_TextureSubresourceMemoryBarrier(
        renderer,
        commandBuffer,
        sourceUsageMode,
        VULKAN_INTERNAL_DefaultTextureUsageMode(textureSubresource->parent),
        textureSubresource);
}
// Barriers every subresource of `texture` from `sourceUsageMode` back to its
// default (resting) usage mode, one barrier per subresource.
static void VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer,
    VulkanTextureUsageMode sourceUsageMode,
    VulkanTexture *texture)
{
    // FIXME: could optimize this barrier (batch subresources into one call)
    for (Uint32 i = 0; i < texture->subresourceCount; i += 1) {
        VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
            renderer,
            commandBuffer,
            sourceUsageMode,
            &texture->subresources[i]);
    }
}
  2439. // Resource Disposal
// Queues `framebuffer` for deferred destruction; actual vkDestroyFramebuffer
// happens later when the destroy list is drained. Guarded by disposeLock so
// any thread may release.
static void VULKAN_INTERNAL_ReleaseFramebuffer(
    VulkanRenderer *renderer,
    VulkanFramebuffer *framebuffer)
{
    SDL_LockMutex(renderer->disposeLock);

    EXPAND_ARRAY_IF_NEEDED(
        renderer->framebuffersToDestroy,
        VulkanFramebuffer *,
        renderer->framebuffersToDestroyCount + 1,
        renderer->framebuffersToDestroyCapacity,
        renderer->framebuffersToDestroyCapacity * 2)

    renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount] = framebuffer;
    renderer->framebuffersToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
// Immediately destroys the VkFramebuffer and frees the wrapper struct.
// Caller must guarantee the framebuffer is no longer in use by the GPU.
static void VULKAN_INTERNAL_DestroyFramebuffer(
    VulkanRenderer *renderer,
    VulkanFramebuffer *framebuffer)
{
    renderer->vkDestroyFramebuffer(
        renderer->logicalDevice,
        framebuffer->framebuffer,
        NULL);

    SDL_free(framebuffer);
}
  2465. static void VULKAN_INTERNAL_RemoveFramebuffersContainingView(
  2466. VulkanRenderer *renderer,
  2467. VkImageView view)
  2468. {
  2469. FramebufferHashTableKey *key;
  2470. VulkanFramebuffer *value;
  2471. void *iter = NULL;
  2472. // Can't remove while iterating!
  2473. Uint32 keysToRemoveCapacity = 8;
  2474. Uint32 keysToRemoveCount = 0;
  2475. FramebufferHashTableKey **keysToRemove = SDL_malloc(keysToRemoveCapacity * sizeof(FramebufferHashTableKey *));
  2476. SDL_LockMutex(renderer->framebufferFetchLock);
  2477. while (SDL_IterateHashTable(renderer->framebufferHashTable, (const void **)&key, (const void **)&value, &iter)) {
  2478. bool remove = false;
  2479. for (Uint32 i = 0; i < key->colorAttachmentCount; i += 1) {
  2480. if (key->colorAttachmentViews[i] == view) {
  2481. remove = true;
  2482. }
  2483. }
  2484. if (key->depthStencilAttachmentView == view) {
  2485. remove = true;
  2486. }
  2487. if (remove) {
  2488. if (keysToRemoveCount == keysToRemoveCapacity) {
  2489. keysToRemoveCapacity *= 2;
  2490. keysToRemove = SDL_realloc(
  2491. keysToRemove,
  2492. keysToRemoveCapacity * sizeof(FramebufferHashTableKey *));
  2493. }
  2494. keysToRemove[keysToRemoveCount] = key;
  2495. keysToRemoveCount += 1;
  2496. }
  2497. }
  2498. for (Uint32 i = 0; i < keysToRemoveCount; i += 1) {
  2499. SDL_RemoveFromHashTable(renderer->framebufferHashTable, (void *)keysToRemove[i]);
  2500. }
  2501. SDL_UnlockMutex(renderer->framebufferFetchLock);
  2502. SDL_free(keysToRemove);
  2503. }
// Immediately destroys a texture: per-subresource views (render target,
// compute-write, depth-stencil), any per-subresource MSAA resolve texture
// (destroyed recursively), the full view, the VkImage, and the memory region.
// Also evicts cached framebuffers that reference the render target views.
// Caller must guarantee the texture is no longer in use by the GPU.
static void VULKAN_INTERNAL_DestroyTexture(
    VulkanRenderer *renderer,
    VulkanTexture *texture)
{
    // Clean up subresources
    for (Uint32 subresourceIndex = 0; subresourceIndex < texture->subresourceCount; subresourceIndex += 1) {
        if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT) {
            // Framebuffers caching these views must go before the views do.
            for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) {
                VULKAN_INTERNAL_RemoveFramebuffersContainingView(
                    renderer,
                    texture->subresources[subresourceIndex].renderTargetViews[depthIndex]);
            }

            // Recursively destroy the MSAA companion texture, if any.
            if (texture->subresources[subresourceIndex].msaaTexHandle != NULL) {
                VULKAN_INTERNAL_DestroyTexture(
                    renderer,
                    texture->subresources[subresourceIndex].msaaTexHandle->vulkanTexture);
                SDL_free(texture->subresources[subresourceIndex].msaaTexHandle);
            }

            // One render target view per depth slice.
            for (Uint32 depthIndex = 0; depthIndex < texture->depth; depthIndex += 1) {
                renderer->vkDestroyImageView(
                    renderer->logicalDevice,
                    texture->subresources[subresourceIndex].renderTargetViews[depthIndex],
                    NULL);
            }
            SDL_free(texture->subresources[subresourceIndex].renderTargetViews);
        }

        if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE_BIT) {
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].computeWriteView,
                NULL);
        }

        if (texture->usageFlags & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT) {
            renderer->vkDestroyImageView(
                renderer->logicalDevice,
                texture->subresources[subresourceIndex].depthStencilView,
                NULL);
        }
    }
    SDL_free(texture->subresources);

    renderer->vkDestroyImageView(
        renderer->logicalDevice,
        texture->fullView,
        NULL);

    renderer->vkDestroyImage(
        renderer->logicalDevice,
        texture->image,
        NULL);

    // Return the backing memory to the allocator.
    VULKAN_INTERNAL_RemoveMemoryUsedRegion(
        renderer,
        texture->usedRegion);

    SDL_free(texture);
}
// Immediately destroys the VkBuffer, returns its memory region to the
// allocator, and frees the wrapper struct. Caller must guarantee the buffer
// is no longer in use by the GPU.
static void VULKAN_INTERNAL_DestroyBuffer(
    VulkanRenderer *renderer,
    VulkanBuffer *buffer)
{
    renderer->vkDestroyBuffer(
        renderer->logicalDevice,
        buffer->buffer,
        NULL);

    VULKAN_INTERNAL_RemoveMemoryUsedRegion(
        renderer,
        buffer->usedRegion);

    SDL_free(buffer);
}
// Destroys a per-thread command pool: the VkCommandPool (which frees all of
// its VkCommandBuffers), then the host-side arrays of every inactive command
// buffer wrapper, and finally the wrappers and pool struct themselves.
static void VULKAN_INTERNAL_DestroyCommandPool(
    VulkanRenderer *renderer,
    VulkanCommandPool *commandPool)
{
    Uint32 i;
    VulkanCommandBuffer *commandBuffer;

    renderer->vkDestroyCommandPool(
        renderer->logicalDevice,
        commandPool->commandPool,
        NULL);

    for (i = 0; i < commandPool->inactiveCommandBufferCount; i += 1) {
        commandBuffer = commandPool->inactiveCommandBuffers[i];

        SDL_free(commandBuffer->presentDatas);
        SDL_free(commandBuffer->waitSemaphores);
        SDL_free(commandBuffer->signalSemaphores);
        SDL_free(commandBuffer->boundDescriptorSetDatas);
        SDL_free(commandBuffer->usedBuffers);
        SDL_free(commandBuffer->usedTextures);
        SDL_free(commandBuffer->usedSamplers);
        SDL_free(commandBuffer->usedGraphicsPipelines);
        SDL_free(commandBuffer->usedComputePipelines);
        SDL_free(commandBuffer->usedFramebuffers);
        SDL_free(commandBuffer->usedUniformBuffers);

        SDL_free(commandBuffer);
    }

    SDL_free(commandPool->inactiveCommandBuffers);
    SDL_free(commandPool);
}
// Tears down a DescriptorSetPool: all VkDescriptorPools (which frees their
// descriptor sets), the set layout, the host arrays, and the mutex.
// NOTE: does NOT free `pool` itself — callers pass pointers into an array
// embedded in a pipeline's resource layout.
static void VULKAN_INTERNAL_DestroyDescriptorSetPool(
    VulkanRenderer *renderer,
    DescriptorSetPool *pool)
{
    Uint32 i;

    if (pool == NULL) {
        return;
    }

    for (i = 0; i < pool->descriptorPoolCount; i += 1) {
        renderer->vkDestroyDescriptorPool(
            renderer->logicalDevice,
            pool->descriptorPools[i],
            NULL);
    }

    renderer->vkDestroyDescriptorSetLayout(
        renderer->logicalDevice,
        pool->descriptorSetLayout,
        NULL);

    SDL_free(pool->descriptorInfos);
    SDL_free(pool->descriptorPools);
    SDL_free(pool->inactiveDescriptorSets);
    SDL_DestroyMutex(pool->lock);
}
// Destroys a graphics pipeline: VkPipeline, pipeline layout, its 4 descriptor
// set pools (vertex/fragment resource + uniform sets), and drops the
// references it held on its vertex and fragment shaders.
static void VULKAN_INTERNAL_DestroyGraphicsPipeline(
    VulkanRenderer *renderer,
    VulkanGraphicsPipeline *graphicsPipeline)
{
    Uint32 i;

    renderer->vkDestroyPipeline(
        renderer->logicalDevice,
        graphicsPipeline->pipeline,
        NULL);

    renderer->vkDestroyPipelineLayout(
        renderer->logicalDevice,
        graphicsPipeline->resourceLayout.pipelineLayout,
        NULL);

    for (i = 0; i < 4; i += 1) {
        VULKAN_INTERNAL_DestroyDescriptorSetPool(
            renderer,
            &graphicsPipeline->resourceLayout.descriptorSetPools[i]);
    }

    // Shaders were ref'd at pipeline creation; release them now.
    (void)SDL_AtomicDecRef(&graphicsPipeline->vertexShader->referenceCount);
    (void)SDL_AtomicDecRef(&graphicsPipeline->fragmentShader->referenceCount);

    SDL_free(graphicsPipeline);
}
// Destroys a compute pipeline: VkPipeline, pipeline layout, its 3 descriptor
// set pools, and the shader module it owns (unlike graphics pipelines, which
// only hold references to shared shader objects).
static void VULKAN_INTERNAL_DestroyComputePipeline(
    VulkanRenderer *renderer,
    VulkanComputePipeline *computePipeline)
{
    Uint32 i;

    renderer->vkDestroyPipeline(
        renderer->logicalDevice,
        computePipeline->pipeline,
        NULL);

    renderer->vkDestroyPipelineLayout(
        renderer->logicalDevice,
        computePipeline->resourceLayout.pipelineLayout,
        NULL);

    for (i = 0; i < 3; i += 1) {
        VULKAN_INTERNAL_DestroyDescriptorSetPool(
            renderer,
            &computePipeline->resourceLayout.descriptorSetPools[i]);
    }

    renderer->vkDestroyShaderModule(
        renderer->logicalDevice,
        computePipeline->shaderModule,
        NULL);

    SDL_free(computePipeline);
}
// Destroys a shader object: the VkShaderModule, the entry point name string
// (stored const, hence the cast to free), and the wrapper struct.
static void VULKAN_INTERNAL_DestroyShader(
    VulkanRenderer *renderer,
    VulkanShader *vulkanShader)
{
    renderer->vkDestroyShaderModule(
        renderer->logicalDevice,
        vulkanShader->shaderModule,
        NULL);

    SDL_free((void *)vulkanShader->entryPointName);
    SDL_free(vulkanShader);
}
// Destroys the VkSampler and frees the wrapper struct.
static void VULKAN_INTERNAL_DestroySampler(
    VulkanRenderer *renderer,
    VulkanSampler *vulkanSampler)
{
    renderer->vkDestroySampler(
        renderer->logicalDevice,
        vulkanSampler->sampler,
        NULL);

    SDL_free(vulkanSampler);
}
// Tears down a window's swapchain: per-image render target views (and the
// framebuffers cached against them) and texture wrappers, then the
// VkSwapchainKHR, the VkSurfaceKHR, and the per-frame acquire/present
// semaphores. Clears windowData->swapchainData. Safe to call with a NULL
// window or an already-destroyed swapchain.
static void VULKAN_INTERNAL_DestroySwapchain(
    VulkanRenderer *renderer,
    WindowData *windowData)
{
    Uint32 i;
    VulkanSwapchainData *swapchainData;

    if (windowData == NULL) {
        return;
    }

    swapchainData = windowData->swapchainData;

    if (swapchainData == NULL) {
        return;
    }

    for (i = 0; i < swapchainData->imageCount; i += 1) {
        // Swapchain textures have exactly one subresource and one view;
        // the VkImages themselves are owned by the swapchain, so only the
        // views and wrappers are destroyed here.
        VULKAN_INTERNAL_RemoveFramebuffersContainingView(
            renderer,
            swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].renderTargetViews[0]);
        renderer->vkDestroyImageView(
            renderer->logicalDevice,
            swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].renderTargetViews[0],
            NULL);
        SDL_free(swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].renderTargetViews);
        SDL_free(swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources);
        SDL_free(swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture);
        SDL_free(swapchainData->textureContainers[i].activeTextureHandle);
    }

    SDL_free(swapchainData->textureContainers);

    renderer->vkDestroySwapchainKHR(
        renderer->logicalDevice,
        swapchainData->swapchain,
        NULL);

    renderer->vkDestroySurfaceKHR(
        renderer->instance,
        swapchainData->surface,
        NULL);

    for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
        renderer->vkDestroySemaphore(
            renderer->logicalDevice,
            swapchainData->imageAvailableSemaphore[i],
            NULL);
        renderer->vkDestroySemaphore(
            renderer->logicalDevice,
            swapchainData->renderFinishedSemaphore[i],
            NULL);
    }

    windowData->swapchainData = NULL;
    SDL_free(swapchainData);
}
  2736. // Hashtable functions
  2737. static Uint32 VULKAN_INTERNAL_CommandPoolHashFunction(const void *key, void *data)
  2738. {
  2739. return (Uint32)((CommandPoolHashTableKey *)key)->threadID;
  2740. }
  2741. static bool VULKAN_INTERNAL_CommandPoolHashKeyMatch(const void *aKey, const void *bKey, void *data)
  2742. {
  2743. CommandPoolHashTableKey *a = (CommandPoolHashTableKey *)aKey;
  2744. CommandPoolHashTableKey *b = (CommandPoolHashTableKey *)bKey;
  2745. return a->threadID == b->threadID;
  2746. }
  2747. static void VULKAN_INTERNAL_CommandPoolHashNuke(const void *key, const void *value, void *data)
  2748. {
  2749. VulkanRenderer *renderer = (VulkanRenderer *)data;
  2750. VulkanCommandPool *pool = (VulkanCommandPool *)value;
  2751. VULKAN_INTERNAL_DestroyCommandPool(renderer, pool);
  2752. SDL_free((void *)key);
  2753. }
  2754. static Uint32 VULKAN_INTERNAL_RenderPassHashFunction(
  2755. const void *key,
  2756. void *data)
  2757. {
  2758. RenderPassHashTableKey *hashTableKey = (RenderPassHashTableKey *)key;
  2759. /* The algorithm for this hashing function
  2760. * is taken from Josh Bloch's "Effective Java".
  2761. * (https://stackoverflow.com/a/113600/12492383)
  2762. */
  2763. const Uint32 HASH_FACTOR = 31;
  2764. Uint32 result = 1;
  2765. for (Uint32 i = 0; i < hashTableKey->colorAttachmentCount; i += 1) {
  2766. result = result * HASH_FACTOR + hashTableKey->colorTargetDescriptions[i].loadOp;
  2767. result = result * HASH_FACTOR + hashTableKey->colorTargetDescriptions[i].storeOp;
  2768. result = result * HASH_FACTOR + hashTableKey->colorTargetDescriptions[i].format;
  2769. }
  2770. result = result * HASH_FACTOR + hashTableKey->depthStencilTargetDescription.loadOp;
  2771. result = result * HASH_FACTOR + hashTableKey->depthStencilTargetDescription.storeOp;
  2772. result = result * HASH_FACTOR + hashTableKey->depthStencilTargetDescription.stencilLoadOp;
  2773. result = result * HASH_FACTOR + hashTableKey->depthStencilTargetDescription.stencilStoreOp;
  2774. result = result * HASH_FACTOR + hashTableKey->depthStencilTargetDescription.format;
  2775. result = result * HASH_FACTOR + hashTableKey->colorAttachmentSampleCount;
  2776. return result;
  2777. }
  2778. static bool VULKAN_INTERNAL_RenderPassHashKeyMatch(
  2779. const void *aKey,
  2780. const void *bKey,
  2781. void *data)
  2782. {
  2783. RenderPassHashTableKey *a = (RenderPassHashTableKey *)aKey;
  2784. RenderPassHashTableKey *b = (RenderPassHashTableKey *)bKey;
  2785. if (a->colorAttachmentCount != b->colorAttachmentCount) {
  2786. return 0;
  2787. }
  2788. if (a->colorAttachmentSampleCount != b->colorAttachmentSampleCount) {
  2789. return 0;
  2790. }
  2791. for (Uint32 i = 0; i < a->colorAttachmentCount; i += 1) {
  2792. if (a->colorTargetDescriptions[i].format != b->colorTargetDescriptions[i].format) {
  2793. return 0;
  2794. }
  2795. if (a->colorTargetDescriptions[i].loadOp != b->colorTargetDescriptions[i].loadOp) {
  2796. return 0;
  2797. }
  2798. if (a->colorTargetDescriptions[i].storeOp != b->colorTargetDescriptions[i].storeOp) {
  2799. return 0;
  2800. }
  2801. }
  2802. if (a->depthStencilTargetDescription.format != b->depthStencilTargetDescription.format) {
  2803. return 0;
  2804. }
  2805. if (a->depthStencilTargetDescription.loadOp != b->depthStencilTargetDescription.loadOp) {
  2806. return 0;
  2807. }
  2808. if (a->depthStencilTargetDescription.storeOp != b->depthStencilTargetDescription.storeOp) {
  2809. return 0;
  2810. }
  2811. if (a->depthStencilTargetDescription.stencilLoadOp != b->depthStencilTargetDescription.stencilLoadOp) {
  2812. return 0;
  2813. }
  2814. if (a->depthStencilTargetDescription.stencilStoreOp != b->depthStencilTargetDescription.stencilStoreOp) {
  2815. return 0;
  2816. }
  2817. return 1;
  2818. }
  2819. static void VULKAN_INTERNAL_RenderPassHashNuke(const void *key, const void *value, void *data)
  2820. {
  2821. VulkanRenderer *renderer = (VulkanRenderer *)data;
  2822. VulkanRenderPassHashTableValue *renderPassWrapper = (VulkanRenderPassHashTableValue *)value;
  2823. renderer->vkDestroyRenderPass(
  2824. renderer->logicalDevice,
  2825. renderPassWrapper->handle,
  2826. NULL);
  2827. SDL_free(renderPassWrapper);
  2828. SDL_free((void *)key);
  2829. }
  2830. static Uint32 VULKAN_INTERNAL_FramebufferHashFunction(
  2831. const void *key,
  2832. void *data)
  2833. {
  2834. FramebufferHashTableKey *hashTableKey = (FramebufferHashTableKey *)key;
  2835. /* The algorithm for this hashing function
  2836. * is taken from Josh Bloch's "Effective Java".
  2837. * (https://stackoverflow.com/a/113600/12492383)
  2838. */
  2839. const Uint32 HASH_FACTOR = 31;
  2840. Uint32 result = 1;
  2841. for (Uint32 i = 0; i < hashTableKey->colorAttachmentCount; i += 1) {
  2842. result = result * HASH_FACTOR + (Uint32)(uintptr_t)hashTableKey->colorAttachmentViews[i];
  2843. result = result * HASH_FACTOR + (Uint32)(uintptr_t)hashTableKey->colorMultiSampleAttachmentViews[i];
  2844. }
  2845. result = result * HASH_FACTOR + (Uint32)(uintptr_t)hashTableKey->depthStencilAttachmentView;
  2846. result = result * HASH_FACTOR + hashTableKey->width;
  2847. result = result * HASH_FACTOR + hashTableKey->height;
  2848. return result;
  2849. }
  2850. static bool VULKAN_INTERNAL_FramebufferHashKeyMatch(
  2851. const void *aKey,
  2852. const void *bKey,
  2853. void *data)
  2854. {
  2855. FramebufferHashTableKey *a = (FramebufferHashTableKey *)aKey;
  2856. FramebufferHashTableKey *b = (FramebufferHashTableKey *)bKey;
  2857. if (a->colorAttachmentCount != b->colorAttachmentCount) {
  2858. return 0;
  2859. }
  2860. for (Uint32 i = 0; i < a->colorAttachmentCount; i += 1) {
  2861. if (a->colorAttachmentViews[i] != b->colorAttachmentViews[i]) {
  2862. return 0;
  2863. }
  2864. if (a->colorMultiSampleAttachmentViews[i] != b->colorMultiSampleAttachmentViews[i]) {
  2865. return 0;
  2866. }
  2867. }
  2868. if (a->depthStencilAttachmentView != b->depthStencilAttachmentView) {
  2869. return 0;
  2870. }
  2871. if (a->width != b->width) {
  2872. return 0;
  2873. }
  2874. if (a->height != b->height) {
  2875. return 0;
  2876. }
  2877. return 1;
  2878. }
  2879. static void VULKAN_INTERNAL_FramebufferHashNuke(const void *key, const void *value, void *data)
  2880. {
  2881. VulkanRenderer *renderer = (VulkanRenderer *)data;
  2882. VulkanFramebuffer *framebuffer = (VulkanFramebuffer *)value;
  2883. VULKAN_INTERNAL_ReleaseFramebuffer(renderer, framebuffer);
  2884. SDL_free((void *)key);
  2885. }
  2886. // Descriptor pool stuff
  2887. static bool VULKAN_INTERNAL_CreateDescriptorPool(
  2888. VulkanRenderer *renderer,
  2889. VulkanDescriptorInfo *descriptorInfos,
  2890. Uint32 descriptorInfoCount,
  2891. Uint32 descriptorSetPoolSize,
  2892. VkDescriptorPool *pDescriptorPool)
  2893. {
  2894. VkDescriptorPoolSize *descriptorPoolSizes;
  2895. VkDescriptorPoolCreateInfo descriptorPoolInfo;
  2896. VkResult vulkanResult;
  2897. Uint32 i;
  2898. descriptorPoolSizes = NULL;
  2899. if (descriptorInfoCount > 0) {
  2900. descriptorPoolSizes = SDL_stack_alloc(VkDescriptorPoolSize, descriptorInfoCount);
  2901. for (i = 0; i < descriptorInfoCount; i += 1) {
  2902. descriptorPoolSizes[i].type = descriptorInfos[i].descriptorType;
  2903. descriptorPoolSizes[i].descriptorCount = descriptorSetPoolSize;
  2904. }
  2905. }
  2906. descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  2907. descriptorPoolInfo.pNext = NULL;
  2908. descriptorPoolInfo.flags = 0;
  2909. descriptorPoolInfo.maxSets = descriptorSetPoolSize;
  2910. descriptorPoolInfo.poolSizeCount = descriptorInfoCount;
  2911. descriptorPoolInfo.pPoolSizes = descriptorPoolSizes;
  2912. vulkanResult = renderer->vkCreateDescriptorPool(
  2913. renderer->logicalDevice,
  2914. &descriptorPoolInfo,
  2915. NULL,
  2916. pDescriptorPool);
  2917. SDL_stack_free(descriptorPoolSizes);
  2918. if (vulkanResult != VK_SUCCESS) {
  2919. LogVulkanResultAsError("vkCreateDescriptorPool", vulkanResult);
  2920. return false;
  2921. }
  2922. return true;
  2923. }
  2924. static bool VULKAN_INTERNAL_AllocateDescriptorSets(
  2925. VulkanRenderer *renderer,
  2926. VkDescriptorPool descriptorPool,
  2927. VkDescriptorSetLayout descriptorSetLayout,
  2928. Uint32 descriptorSetCount,
  2929. VkDescriptorSet *descriptorSetArray)
  2930. {
  2931. VkDescriptorSetAllocateInfo descriptorSetAllocateInfo;
  2932. VkDescriptorSetLayout *descriptorSetLayouts = SDL_stack_alloc(VkDescriptorSetLayout, descriptorSetCount);
  2933. VkResult vulkanResult;
  2934. Uint32 i;
  2935. for (i = 0; i < descriptorSetCount; i += 1) {
  2936. descriptorSetLayouts[i] = descriptorSetLayout;
  2937. }
  2938. descriptorSetAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
  2939. descriptorSetAllocateInfo.pNext = NULL;
  2940. descriptorSetAllocateInfo.descriptorPool = descriptorPool;
  2941. descriptorSetAllocateInfo.descriptorSetCount = descriptorSetCount;
  2942. descriptorSetAllocateInfo.pSetLayouts = descriptorSetLayouts;
  2943. vulkanResult = renderer->vkAllocateDescriptorSets(
  2944. renderer->logicalDevice,
  2945. &descriptorSetAllocateInfo,
  2946. descriptorSetArray);
  2947. if (vulkanResult != VK_SUCCESS) {
  2948. LogVulkanResultAsError("vkAllocateDescriptorSets", vulkanResult);
  2949. SDL_stack_free(descriptorSetLayouts);
  2950. return false;
  2951. }
  2952. SDL_stack_free(descriptorSetLayouts);
  2953. return true;
  2954. }
// Finishes initializing a DescriptorSetPool whose descriptorSetLayout,
// descriptorInfos, and descriptorInfoCount were already filled in by the
// caller: creates the pool's mutex, its first VkDescriptorPool, and a
// batch of DESCRIPTOR_POOL_STARTING_SIZE pre-allocated (inactive)
// descriptor sets.
// NOTE(review): the bool results of VULKAN_INTERNAL_CreateDescriptorPool
// and VULKAN_INTERNAL_AllocateDescriptorSets are ignored here, so a
// failure leaves the pool partially initialized — consider propagating
// the error to the caller.
static void VULKAN_INTERNAL_InitializeDescriptorSetPool(
    VulkanRenderer *renderer,
    DescriptorSetPool *descriptorSetPool)
{
    descriptorSetPool->lock = SDL_CreateMutex();

    // Descriptor set layout and descriptor infos are already set when this function is called

    descriptorSetPool->descriptorPoolCount = 1;
    descriptorSetPool->descriptorPools = SDL_malloc(sizeof(VkDescriptorPool));
    // The next pool to be created will be twice the starting size.
    descriptorSetPool->nextPoolSize = DESCRIPTOR_POOL_STARTING_SIZE * 2;

    VULKAN_INTERNAL_CreateDescriptorPool(
        renderer,
        descriptorSetPool->descriptorInfos,
        descriptorSetPool->descriptorInfoCount,
        DESCRIPTOR_POOL_STARTING_SIZE,
        &descriptorSetPool->descriptorPools[0]);

    // All sets start out inactive (available for grabbing).
    descriptorSetPool->inactiveDescriptorSetCapacity = DESCRIPTOR_POOL_STARTING_SIZE;
    descriptorSetPool->inactiveDescriptorSetCount = DESCRIPTOR_POOL_STARTING_SIZE;
    descriptorSetPool->inactiveDescriptorSets = SDL_malloc(
        sizeof(VkDescriptorSet) * DESCRIPTOR_POOL_STARTING_SIZE);

    VULKAN_INTERNAL_AllocateDescriptorSets(
        renderer,
        descriptorSetPool->descriptorPools[0],
        descriptorSetPool->descriptorSetLayout,
        DESCRIPTOR_POOL_STARTING_SIZE,
        descriptorSetPool->inactiveDescriptorSets);
}
  2981. static bool VULKAN_INTERNAL_InitializeGraphicsPipelineResourceLayout(
  2982. VulkanRenderer *renderer,
  2983. VulkanShader *vertexShader,
  2984. VulkanShader *fragmentShader,
  2985. VulkanGraphicsPipelineResourceLayout *pipelineResourceLayout)
  2986. {
  2987. VkDescriptorSetLayoutBinding descriptorSetLayoutBindings[MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE + MAX_STORAGE_BUFFERS_PER_STAGE];
  2988. VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo;
  2989. VkDescriptorSetLayout descriptorSetLayouts[4];
  2990. VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
  2991. DescriptorSetPool *descriptorSetPool;
  2992. VkResult vulkanResult;
  2993. Uint32 i;
  2994. pipelineResourceLayout->vertexSamplerCount = vertexShader->samplerCount;
  2995. pipelineResourceLayout->vertexStorageTextureCount = vertexShader->storageTextureCount;
  2996. pipelineResourceLayout->vertexStorageBufferCount = vertexShader->storageBufferCount;
  2997. pipelineResourceLayout->vertexUniformBufferCount = vertexShader->uniformBufferCount;
  2998. pipelineResourceLayout->fragmentSamplerCount = fragmentShader->samplerCount;
  2999. pipelineResourceLayout->fragmentStorageTextureCount = fragmentShader->storageTextureCount;
  3000. pipelineResourceLayout->fragmentStorageBufferCount = fragmentShader->storageBufferCount;
  3001. pipelineResourceLayout->fragmentUniformBufferCount = fragmentShader->uniformBufferCount;
  3002. // Vertex Resources
  3003. descriptorSetLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  3004. descriptorSetLayoutCreateInfo.pNext = NULL;
  3005. descriptorSetLayoutCreateInfo.flags = 0;
  3006. descriptorSetLayoutCreateInfo.pBindings = NULL;
  3007. descriptorSetLayoutCreateInfo.bindingCount =
  3008. vertexShader->samplerCount +
  3009. vertexShader->storageTextureCount +
  3010. vertexShader->storageBufferCount;
  3011. descriptorSetPool = &pipelineResourceLayout->descriptorSetPools[0];
  3012. descriptorSetPool->descriptorInfoCount = descriptorSetLayoutCreateInfo.bindingCount;
  3013. descriptorSetPool->descriptorInfos = NULL;
  3014. if (descriptorSetLayoutCreateInfo.bindingCount > 0) {
  3015. descriptorSetPool->descriptorInfos = SDL_malloc(
  3016. descriptorSetPool->descriptorInfoCount * sizeof(VulkanDescriptorInfo));
  3017. for (i = 0; i < vertexShader->samplerCount; i += 1) {
  3018. descriptorSetLayoutBindings[i].binding = i;
  3019. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3020. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  3021. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
  3022. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3023. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  3024. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_VERTEX_BIT;
  3025. }
  3026. for (i = vertexShader->samplerCount; i < vertexShader->samplerCount + vertexShader->storageTextureCount; i += 1) {
  3027. descriptorSetLayoutBindings[i].binding = i;
  3028. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3029. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3030. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
  3031. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3032. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3033. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_VERTEX_BIT;
  3034. }
  3035. for (i = vertexShader->samplerCount + vertexShader->storageTextureCount; i < descriptorSetLayoutCreateInfo.bindingCount; i += 1) {
  3036. descriptorSetLayoutBindings[i].binding = i;
  3037. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3038. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3039. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
  3040. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3041. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3042. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_VERTEX_BIT;
  3043. }
  3044. descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
  3045. }
  3046. vulkanResult = renderer->vkCreateDescriptorSetLayout(
  3047. renderer->logicalDevice,
  3048. &descriptorSetLayoutCreateInfo,
  3049. NULL,
  3050. &descriptorSetPool->descriptorSetLayout);
  3051. descriptorSetLayouts[0] = descriptorSetPool->descriptorSetLayout;
  3052. if (vulkanResult != VK_SUCCESS) {
  3053. LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
  3054. return false;
  3055. }
  3056. // Vertex UBOs
  3057. descriptorSetPool = &pipelineResourceLayout->descriptorSetPools[1];
  3058. descriptorSetLayoutCreateInfo.bindingCount = pipelineResourceLayout->vertexUniformBufferCount;
  3059. descriptorSetLayoutCreateInfo.pBindings = NULL;
  3060. descriptorSetPool->descriptorInfoCount = descriptorSetLayoutCreateInfo.bindingCount;
  3061. descriptorSetPool->descriptorInfos = NULL;
  3062. if (descriptorSetLayoutCreateInfo.bindingCount > 0) {
  3063. descriptorSetPool->descriptorInfos = SDL_malloc(
  3064. descriptorSetPool->descriptorInfoCount * sizeof(VulkanDescriptorInfo));
  3065. for (i = 0; i < vertexShader->uniformBufferCount; i += 1) {
  3066. descriptorSetLayoutBindings[i].binding = i;
  3067. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3068. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  3069. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
  3070. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3071. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  3072. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_VERTEX_BIT;
  3073. }
  3074. descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
  3075. }
  3076. vulkanResult = renderer->vkCreateDescriptorSetLayout(
  3077. renderer->logicalDevice,
  3078. &descriptorSetLayoutCreateInfo,
  3079. NULL,
  3080. &descriptorSetPool->descriptorSetLayout);
  3081. descriptorSetLayouts[1] = descriptorSetPool->descriptorSetLayout;
  3082. if (vulkanResult != VK_SUCCESS) {
  3083. LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
  3084. return false;
  3085. }
  3086. // Fragment resources
  3087. descriptorSetPool = &pipelineResourceLayout->descriptorSetPools[2];
  3088. descriptorSetLayoutCreateInfo.bindingCount =
  3089. fragmentShader->samplerCount +
  3090. fragmentShader->storageTextureCount +
  3091. fragmentShader->storageBufferCount;
  3092. descriptorSetLayoutCreateInfo.pBindings = NULL;
  3093. descriptorSetPool->descriptorInfoCount = descriptorSetLayoutCreateInfo.bindingCount;
  3094. descriptorSetPool->descriptorInfos = NULL;
  3095. if (descriptorSetLayoutCreateInfo.bindingCount > 0) {
  3096. descriptorSetPool->descriptorInfos = SDL_malloc(
  3097. descriptorSetPool->descriptorInfoCount * sizeof(VulkanDescriptorInfo));
  3098. for (i = 0; i < fragmentShader->samplerCount; i += 1) {
  3099. descriptorSetLayoutBindings[i].binding = i;
  3100. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3101. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  3102. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  3103. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3104. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  3105. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_FRAGMENT_BIT;
  3106. }
  3107. for (i = fragmentShader->samplerCount; i < fragmentShader->samplerCount + fragmentShader->storageTextureCount; i += 1) {
  3108. descriptorSetLayoutBindings[i].binding = i;
  3109. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3110. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3111. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  3112. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3113. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3114. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_FRAGMENT_BIT;
  3115. }
  3116. for (i = fragmentShader->samplerCount + fragmentShader->storageTextureCount; i < descriptorSetLayoutCreateInfo.bindingCount; i += 1) {
  3117. descriptorSetLayoutBindings[i].binding = i;
  3118. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3119. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3120. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  3121. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3122. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3123. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_FRAGMENT_BIT;
  3124. }
  3125. descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
  3126. }
  3127. vulkanResult = renderer->vkCreateDescriptorSetLayout(
  3128. renderer->logicalDevice,
  3129. &descriptorSetLayoutCreateInfo,
  3130. NULL,
  3131. &descriptorSetPool->descriptorSetLayout);
  3132. descriptorSetLayouts[2] = descriptorSetPool->descriptorSetLayout;
  3133. if (vulkanResult != VK_SUCCESS) {
  3134. LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
  3135. return false;
  3136. }
  3137. // Fragment UBOs
  3138. descriptorSetPool = &pipelineResourceLayout->descriptorSetPools[3];
  3139. descriptorSetLayoutCreateInfo.bindingCount =
  3140. pipelineResourceLayout->fragmentUniformBufferCount;
  3141. descriptorSetLayoutCreateInfo.pBindings = NULL;
  3142. descriptorSetPool->descriptorInfoCount = descriptorSetLayoutCreateInfo.bindingCount;
  3143. descriptorSetPool->descriptorInfos = NULL;
  3144. if (descriptorSetLayoutCreateInfo.bindingCount > 0) {
  3145. descriptorSetPool->descriptorInfos = SDL_malloc(
  3146. descriptorSetPool->descriptorInfoCount * sizeof(VulkanDescriptorInfo));
  3147. for (i = 0; i < fragmentShader->uniformBufferCount; i += 1) {
  3148. descriptorSetLayoutBindings[i].binding = i;
  3149. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3150. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  3151. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  3152. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3153. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  3154. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_FRAGMENT_BIT;
  3155. }
  3156. descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
  3157. }
  3158. vulkanResult = renderer->vkCreateDescriptorSetLayout(
  3159. renderer->logicalDevice,
  3160. &descriptorSetLayoutCreateInfo,
  3161. NULL,
  3162. &descriptorSetPool->descriptorSetLayout);
  3163. descriptorSetLayouts[3] = descriptorSetPool->descriptorSetLayout;
  3164. if (vulkanResult != VK_SUCCESS) {
  3165. LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
  3166. return false;
  3167. }
  3168. // Create the pipeline layout
  3169. pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  3170. pipelineLayoutCreateInfo.pNext = NULL;
  3171. pipelineLayoutCreateInfo.flags = 0;
  3172. pipelineLayoutCreateInfo.setLayoutCount = 4;
  3173. pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts;
  3174. pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
  3175. pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
  3176. vulkanResult = renderer->vkCreatePipelineLayout(
  3177. renderer->logicalDevice,
  3178. &pipelineLayoutCreateInfo,
  3179. NULL,
  3180. &pipelineResourceLayout->pipelineLayout);
  3181. if (vulkanResult != VK_SUCCESS) {
  3182. LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
  3183. return false;
  3184. }
  3185. for (i = 0; i < 4; i += 1) {
  3186. VULKAN_INTERNAL_InitializeDescriptorSetPool(
  3187. renderer,
  3188. &pipelineResourceLayout->descriptorSetPools[i]);
  3189. }
  3190. return true;
  3191. }
  3192. static bool VULKAN_INTERNAL_InitializeComputePipelineResourceLayout(
  3193. VulkanRenderer *renderer,
  3194. SDL_GPUComputePipelineCreateInfo *pipelineCreateInfo,
  3195. VulkanComputePipelineResourceLayout *pipelineResourceLayout)
  3196. {
  3197. VkDescriptorSetLayoutBinding descriptorSetLayoutBindings[MAX_UNIFORM_BUFFERS_PER_STAGE];
  3198. VkDescriptorSetLayoutCreateInfo descriptorSetLayoutCreateInfo;
  3199. VkDescriptorSetLayout descriptorSetLayouts[3];
  3200. VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
  3201. DescriptorSetPool *descriptorSetPool;
  3202. VkResult vulkanResult;
  3203. Uint32 i;
  3204. pipelineResourceLayout->readOnlyStorageTextureCount = pipelineCreateInfo->readOnlyStorageTextureCount;
  3205. pipelineResourceLayout->readOnlyStorageBufferCount = pipelineCreateInfo->readOnlyStorageBufferCount;
  3206. pipelineResourceLayout->writeOnlyStorageTextureCount = pipelineCreateInfo->writeOnlyStorageTextureCount;
  3207. pipelineResourceLayout->writeOnlyStorageBufferCount = pipelineCreateInfo->writeOnlyStorageBufferCount;
  3208. pipelineResourceLayout->uniformBufferCount = pipelineCreateInfo->uniformBufferCount;
  3209. // Read-only resources
  3210. descriptorSetLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  3211. descriptorSetLayoutCreateInfo.pNext = NULL;
  3212. descriptorSetLayoutCreateInfo.flags = 0;
  3213. descriptorSetLayoutCreateInfo.pBindings = NULL;
  3214. descriptorSetLayoutCreateInfo.bindingCount =
  3215. pipelineCreateInfo->readOnlyStorageTextureCount +
  3216. pipelineCreateInfo->readOnlyStorageBufferCount;
  3217. descriptorSetPool = &pipelineResourceLayout->descriptorSetPools[0];
  3218. descriptorSetPool->descriptorInfoCount = descriptorSetLayoutCreateInfo.bindingCount;
  3219. descriptorSetPool->descriptorInfos = NULL;
  3220. if (descriptorSetLayoutCreateInfo.bindingCount > 0) {
  3221. descriptorSetPool->descriptorInfos = SDL_malloc(
  3222. descriptorSetPool->descriptorInfoCount * sizeof(VulkanDescriptorInfo));
  3223. for (i = 0; i < pipelineCreateInfo->readOnlyStorageTextureCount; i += 1) {
  3224. descriptorSetLayoutBindings[i].binding = i;
  3225. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3226. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3227. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
  3228. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3229. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3230. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_COMPUTE_BIT;
  3231. }
  3232. for (i = pipelineCreateInfo->readOnlyStorageTextureCount; i < descriptorSetLayoutCreateInfo.bindingCount; i += 1) {
  3233. descriptorSetLayoutBindings[i].binding = i;
  3234. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3235. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3236. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
  3237. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3238. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3239. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_COMPUTE_BIT;
  3240. }
  3241. descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
  3242. }
  3243. vulkanResult = renderer->vkCreateDescriptorSetLayout(
  3244. renderer->logicalDevice,
  3245. &descriptorSetLayoutCreateInfo,
  3246. NULL,
  3247. &descriptorSetPool->descriptorSetLayout);
  3248. descriptorSetLayouts[0] = descriptorSetPool->descriptorSetLayout;
  3249. if (vulkanResult != VK_SUCCESS) {
  3250. LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
  3251. return false;
  3252. }
  3253. // Write-only resources
  3254. descriptorSetLayoutCreateInfo.bindingCount =
  3255. pipelineCreateInfo->writeOnlyStorageTextureCount +
  3256. pipelineCreateInfo->writeOnlyStorageBufferCount;
  3257. descriptorSetLayoutCreateInfo.pBindings = NULL;
  3258. descriptorSetPool = &pipelineResourceLayout->descriptorSetPools[1];
  3259. descriptorSetPool->descriptorInfoCount = descriptorSetLayoutCreateInfo.bindingCount;
  3260. descriptorSetPool->descriptorInfos = NULL;
  3261. if (descriptorSetLayoutCreateInfo.bindingCount > 0) {
  3262. descriptorSetPool->descriptorInfos = SDL_malloc(
  3263. descriptorSetPool->descriptorInfoCount * sizeof(VulkanDescriptorInfo));
  3264. for (i = 0; i < pipelineCreateInfo->writeOnlyStorageTextureCount; i += 1) {
  3265. descriptorSetLayoutBindings[i].binding = i;
  3266. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3267. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3268. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
  3269. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3270. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
  3271. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_COMPUTE_BIT;
  3272. }
  3273. for (i = pipelineCreateInfo->writeOnlyStorageTextureCount; i < descriptorSetLayoutCreateInfo.bindingCount; i += 1) {
  3274. descriptorSetLayoutBindings[i].binding = i;
  3275. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3276. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3277. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
  3278. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3279. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
  3280. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_COMPUTE_BIT;
  3281. }
  3282. descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
  3283. }
  3284. vulkanResult = renderer->vkCreateDescriptorSetLayout(
  3285. renderer->logicalDevice,
  3286. &descriptorSetLayoutCreateInfo,
  3287. NULL,
  3288. &descriptorSetPool->descriptorSetLayout);
  3289. descriptorSetLayouts[1] = descriptorSetPool->descriptorSetLayout;
  3290. if (vulkanResult != VK_SUCCESS) {
  3291. LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
  3292. return false;
  3293. }
  3294. // Uniform buffers
  3295. descriptorSetPool = &pipelineResourceLayout->descriptorSetPools[2];
  3296. descriptorSetLayoutCreateInfo.bindingCount = pipelineCreateInfo->uniformBufferCount;
  3297. descriptorSetLayoutCreateInfo.pBindings = NULL;
  3298. descriptorSetPool->descriptorInfoCount = descriptorSetLayoutCreateInfo.bindingCount;
  3299. descriptorSetPool->descriptorInfos = NULL;
  3300. if (descriptorSetLayoutCreateInfo.bindingCount > 0) {
  3301. descriptorSetPool->descriptorInfos = SDL_malloc(
  3302. descriptorSetPool->descriptorInfoCount * sizeof(VulkanDescriptorInfo));
  3303. for (i = 0; i < pipelineCreateInfo->uniformBufferCount; i += 1) {
  3304. descriptorSetLayoutBindings[i].binding = i;
  3305. descriptorSetLayoutBindings[i].descriptorCount = 1;
  3306. descriptorSetLayoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  3307. descriptorSetLayoutBindings[i].stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
  3308. descriptorSetLayoutBindings[i].pImmutableSamplers = NULL;
  3309. descriptorSetPool->descriptorInfos[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
  3310. descriptorSetPool->descriptorInfos[i].stageFlag = VK_SHADER_STAGE_COMPUTE_BIT;
  3311. }
  3312. descriptorSetLayoutCreateInfo.pBindings = descriptorSetLayoutBindings;
  3313. }
  3314. vulkanResult = renderer->vkCreateDescriptorSetLayout(
  3315. renderer->logicalDevice,
  3316. &descriptorSetLayoutCreateInfo,
  3317. NULL,
  3318. &descriptorSetPool->descriptorSetLayout);
  3319. descriptorSetLayouts[2] = descriptorSetPool->descriptorSetLayout;
  3320. if (vulkanResult != VK_SUCCESS) {
  3321. LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
  3322. return false;
  3323. }
  3324. // Create the pipeline layout
  3325. pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  3326. pipelineLayoutCreateInfo.pNext = NULL;
  3327. pipelineLayoutCreateInfo.flags = 0;
  3328. pipelineLayoutCreateInfo.setLayoutCount = 3;
  3329. pipelineLayoutCreateInfo.pSetLayouts = descriptorSetLayouts;
  3330. pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
  3331. pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
  3332. vulkanResult = renderer->vkCreatePipelineLayout(
  3333. renderer->logicalDevice,
  3334. &pipelineLayoutCreateInfo,
  3335. NULL,
  3336. &pipelineResourceLayout->pipelineLayout);
  3337. if (vulkanResult != VK_SUCCESS) {
  3338. LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
  3339. return false;
  3340. }
  3341. for (i = 0; i < 3; i += 1) {
  3342. VULKAN_INTERNAL_InitializeDescriptorSetPool(
  3343. renderer,
  3344. &pipelineResourceLayout->descriptorSetPools[i]);
  3345. }
  3346. return true;
  3347. }
  3348. // Data Buffer
  3349. static VulkanBuffer *VULKAN_INTERNAL_CreateBuffer(
  3350. VulkanRenderer *renderer,
  3351. VkDeviceSize size,
  3352. SDL_GPUBufferUsageFlags usageFlags,
  3353. VulkanBufferType type)
  3354. {
  3355. VulkanBuffer *buffer;
  3356. VkResult vulkanResult;
  3357. VkBufferCreateInfo bufferCreateInfo;
  3358. VkBufferUsageFlags vulkanUsageFlags = 0;
  3359. Uint8 bindResult;
  3360. if (usageFlags & SDL_GPU_BUFFERUSAGE_VERTEX_BIT) {
  3361. vulkanUsageFlags |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
  3362. }
  3363. if (usageFlags & SDL_GPU_BUFFERUSAGE_INDEX_BIT) {
  3364. vulkanUsageFlags |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
  3365. }
  3366. if (usageFlags & (SDL_GPU_BUFFERUSAGE_GRAPHICS_STORAGE_READ_BIT |
  3367. SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_READ_BIT |
  3368. SDL_GPU_BUFFERUSAGE_COMPUTE_STORAGE_WRITE_BIT)) {
  3369. vulkanUsageFlags |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
  3370. }
  3371. if (usageFlags & SDL_GPU_BUFFERUSAGE_INDIRECT_BIT) {
  3372. vulkanUsageFlags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
  3373. }
  3374. if (type == VULKAN_BUFFER_TYPE_UNIFORM) {
  3375. vulkanUsageFlags |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
  3376. } else {
  3377. // GPU buffers need transfer bits for defrag, transfer buffers need them for transfers
  3378. vulkanUsageFlags |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
  3379. }
  3380. buffer = SDL_malloc(sizeof(VulkanBuffer));
  3381. buffer->size = size;
  3382. buffer->usageFlags = usageFlags;
  3383. buffer->type = type;
  3384. buffer->markedForDestroy = 0;
  3385. buffer->transitioned = false;
  3386. bufferCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  3387. bufferCreateInfo.pNext = NULL;
  3388. bufferCreateInfo.flags = 0;
  3389. bufferCreateInfo.size = size;
  3390. bufferCreateInfo.usage = vulkanUsageFlags;
  3391. bufferCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  3392. bufferCreateInfo.queueFamilyIndexCount = 1;
  3393. bufferCreateInfo.pQueueFamilyIndices = &renderer->queueFamilyIndex;
  3394. // Set transfer bits so we can defrag
  3395. bufferCreateInfo.usage |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
  3396. vulkanResult = renderer->vkCreateBuffer(
  3397. renderer->logicalDevice,
  3398. &bufferCreateInfo,
  3399. NULL,
  3400. &buffer->buffer);
  3401. VULKAN_ERROR_CHECK(vulkanResult, vkCreateBuffer, 0)
  3402. bindResult = VULKAN_INTERNAL_BindMemoryForBuffer(
  3403. renderer,
  3404. buffer->buffer,
  3405. buffer->size,
  3406. buffer->type,
  3407. &buffer->usedRegion);
  3408. if (bindResult != 1) {
  3409. renderer->vkDestroyBuffer(
  3410. renderer->logicalDevice,
  3411. buffer->buffer,
  3412. NULL);
  3413. return NULL;
  3414. }
  3415. buffer->usedRegion->vulkanBuffer = buffer; // lol
  3416. buffer->handle = NULL;
  3417. SDL_AtomicSet(&buffer->referenceCount, 0);
  3418. return buffer;
  3419. }
  3420. // Indirection so we can cleanly defrag buffers
  3421. static VulkanBufferHandle *VULKAN_INTERNAL_CreateBufferHandle(
  3422. VulkanRenderer *renderer,
  3423. VkDeviceSize sizeInBytes,
  3424. SDL_GPUBufferUsageFlags usageFlags,
  3425. VulkanBufferType type)
  3426. {
  3427. VulkanBufferHandle *bufferHandle;
  3428. VulkanBuffer *buffer;
  3429. buffer = VULKAN_INTERNAL_CreateBuffer(
  3430. renderer,
  3431. sizeInBytes,
  3432. usageFlags,
  3433. type);
  3434. if (buffer == NULL) {
  3435. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create buffer!");
  3436. return NULL;
  3437. }
  3438. bufferHandle = SDL_malloc(sizeof(VulkanBufferHandle));
  3439. bufferHandle->vulkanBuffer = buffer;
  3440. bufferHandle->container = NULL;
  3441. buffer->handle = bufferHandle;
  3442. return bufferHandle;
  3443. }
  3444. static VulkanBufferContainer *VULKAN_INTERNAL_CreateBufferContainer(
  3445. VulkanRenderer *renderer,
  3446. VkDeviceSize sizeInBytes,
  3447. SDL_GPUBufferUsageFlags usageFlags,
  3448. VulkanBufferType type)
  3449. {
  3450. VulkanBufferContainer *bufferContainer;
  3451. VulkanBufferHandle *bufferHandle;
  3452. bufferHandle = VULKAN_INTERNAL_CreateBufferHandle(
  3453. renderer,
  3454. sizeInBytes,
  3455. usageFlags,
  3456. type);
  3457. if (bufferHandle == NULL) {
  3458. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create buffer container!");
  3459. return NULL;
  3460. }
  3461. bufferContainer = SDL_malloc(sizeof(VulkanBufferContainer));
  3462. bufferContainer->activeBufferHandle = bufferHandle;
  3463. bufferHandle->container = bufferContainer;
  3464. bufferContainer->bufferCapacity = 1;
  3465. bufferContainer->bufferCount = 1;
  3466. bufferContainer->bufferHandles = SDL_malloc(
  3467. bufferContainer->bufferCapacity * sizeof(VulkanBufferHandle *));
  3468. bufferContainer->bufferHandles[0] = bufferContainer->activeBufferHandle;
  3469. bufferContainer->debugName = NULL;
  3470. return bufferContainer;
  3471. }
  3472. // Texture Subresource Utilities
  3473. static Uint32 VULKAN_INTERNAL_GetTextureSubresourceIndex(
  3474. Uint32 mipLevel,
  3475. Uint32 layer,
  3476. Uint32 numLevels)
  3477. {
  3478. return mipLevel + (layer * numLevels);
  3479. }
  3480. static VulkanTextureSubresource *VULKAN_INTERNAL_FetchTextureSubresource(
  3481. VulkanTextureContainer *textureContainer,
  3482. Uint32 layer,
  3483. Uint32 level)
  3484. {
  3485. Uint32 index = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  3486. level,
  3487. layer,
  3488. textureContainer->header.info.levelCount);
  3489. return &textureContainer->activeTextureHandle->vulkanTexture->subresources[index];
  3490. }
  3491. static void VULKAN_INTERNAL_CreateRenderTargetView(
  3492. VulkanRenderer *renderer,
  3493. VulkanTexture *texture,
  3494. Uint32 layerOrDepth,
  3495. Uint32 level,
  3496. VkComponentMapping swizzle,
  3497. VkImageView *pView)
  3498. {
  3499. VkResult vulkanResult;
  3500. VkImageViewCreateInfo imageViewCreateInfo;
  3501. // create framebuffer compatible views for RenderTarget
  3502. imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  3503. imageViewCreateInfo.pNext = NULL;
  3504. imageViewCreateInfo.flags = 0;
  3505. imageViewCreateInfo.image = texture->image;
  3506. imageViewCreateInfo.format = texture->format;
  3507. imageViewCreateInfo.components = swizzle;
  3508. imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
  3509. imageViewCreateInfo.subresourceRange.baseMipLevel = level;
  3510. imageViewCreateInfo.subresourceRange.levelCount = 1;
  3511. imageViewCreateInfo.subresourceRange.baseArrayLayer = layerOrDepth;
  3512. imageViewCreateInfo.subresourceRange.layerCount = 1;
  3513. imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
  3514. vulkanResult = renderer->vkCreateImageView(
  3515. renderer->logicalDevice,
  3516. &imageViewCreateInfo,
  3517. NULL,
  3518. pView);
  3519. if (vulkanResult != VK_SUCCESS) {
  3520. LogVulkanResultAsError(
  3521. "vkCreateImageView",
  3522. vulkanResult);
  3523. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create color attachment image view");
  3524. *pView = (VkImageView)VK_NULL_HANDLE;
  3525. return;
  3526. }
  3527. }
  3528. static void VULKAN_INTERNAL_CreateSubresourceView(
  3529. VulkanRenderer *renderer,
  3530. VulkanTexture *texture,
  3531. Uint32 layer,
  3532. Uint32 level,
  3533. VkComponentMapping swizzle,
  3534. VkImageView *pView)
  3535. {
  3536. VkResult vulkanResult;
  3537. VkImageViewCreateInfo imageViewCreateInfo;
  3538. // create framebuffer compatible views for RenderTarget
  3539. imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  3540. imageViewCreateInfo.pNext = NULL;
  3541. imageViewCreateInfo.flags = 0;
  3542. imageViewCreateInfo.image = texture->image;
  3543. imageViewCreateInfo.format = texture->format;
  3544. imageViewCreateInfo.components = swizzle;
  3545. imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
  3546. imageViewCreateInfo.subresourceRange.baseMipLevel = level;
  3547. imageViewCreateInfo.subresourceRange.levelCount = 1;
  3548. imageViewCreateInfo.subresourceRange.baseArrayLayer = layer;
  3549. imageViewCreateInfo.subresourceRange.layerCount = 1;
  3550. imageViewCreateInfo.viewType = texture->depth > 1 ? VK_IMAGE_VIEW_TYPE_3D : VK_IMAGE_VIEW_TYPE_2D;
  3551. vulkanResult = renderer->vkCreateImageView(
  3552. renderer->logicalDevice,
  3553. &imageViewCreateInfo,
  3554. NULL,
  3555. pView);
  3556. if (vulkanResult != VK_SUCCESS) {
  3557. LogVulkanResultAsError(
  3558. "vkCreateImageView",
  3559. vulkanResult);
  3560. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create color attachment image view");
  3561. *pView = (VkImageView)VK_NULL_HANDLE;
  3562. return;
  3563. }
  3564. }
  3565. // Swapchain
  3566. static Uint8 VULKAN_INTERNAL_QuerySwapchainSupport(
  3567. VulkanRenderer *renderer,
  3568. VkPhysicalDevice physicalDevice,
  3569. VkSurfaceKHR surface,
  3570. SwapchainSupportDetails *outputDetails)
  3571. {
  3572. VkResult result;
  3573. VkBool32 supportsPresent;
  3574. renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
  3575. physicalDevice,
  3576. renderer->queueFamilyIndex,
  3577. surface,
  3578. &supportsPresent);
  3579. // Initialize these in case anything fails
  3580. outputDetails->formatsLength = 0;
  3581. outputDetails->presentModesLength = 0;
  3582. if (!supportsPresent) {
  3583. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "This surface does not support presenting!");
  3584. return 0;
  3585. }
  3586. // Run the device surface queries
  3587. result = renderer->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
  3588. physicalDevice,
  3589. surface,
  3590. &outputDetails->capabilities);
  3591. VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfaceCapabilitiesKHR, 0)
  3592. if (!(outputDetails->capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR)) {
  3593. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Opaque presentation unsupported! Expect weird transparency bugs!");
  3594. }
  3595. result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
  3596. physicalDevice,
  3597. surface,
  3598. &outputDetails->formatsLength,
  3599. NULL);
  3600. VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfaceFormatsKHR, 0)
  3601. result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
  3602. physicalDevice,
  3603. surface,
  3604. &outputDetails->presentModesLength,
  3605. NULL);
  3606. VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfacePresentModesKHR, 0)
  3607. // Generate the arrays, if applicable
  3608. outputDetails->formats = NULL;
  3609. if (outputDetails->formatsLength != 0) {
  3610. outputDetails->formats = (VkSurfaceFormatKHR *)SDL_malloc(
  3611. sizeof(VkSurfaceFormatKHR) * outputDetails->formatsLength);
  3612. if (!outputDetails->formats) {
  3613. return 0;
  3614. }
  3615. result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
  3616. physicalDevice,
  3617. surface,
  3618. &outputDetails->formatsLength,
  3619. outputDetails->formats);
  3620. if (result != VK_SUCCESS) {
  3621. SDL_LogError(
  3622. SDL_LOG_CATEGORY_GPU,
  3623. "vkGetPhysicalDeviceSurfaceFormatsKHR: %s",
  3624. VkErrorMessages(result));
  3625. SDL_free(outputDetails->formats);
  3626. return 0;
  3627. }
  3628. }
  3629. outputDetails->presentModes = NULL;
  3630. if (outputDetails->presentModesLength != 0) {
  3631. outputDetails->presentModes = (VkPresentModeKHR *)SDL_malloc(
  3632. sizeof(VkPresentModeKHR) * outputDetails->presentModesLength);
  3633. if (!outputDetails->presentModes) {
  3634. SDL_free(outputDetails->formats);
  3635. return 0;
  3636. }
  3637. result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
  3638. physicalDevice,
  3639. surface,
  3640. &outputDetails->presentModesLength,
  3641. outputDetails->presentModes);
  3642. if (result != VK_SUCCESS) {
  3643. SDL_LogError(
  3644. SDL_LOG_CATEGORY_GPU,
  3645. "vkGetPhysicalDeviceSurfacePresentModesKHR: %s",
  3646. VkErrorMessages(result));
  3647. SDL_free(outputDetails->formats);
  3648. SDL_free(outputDetails->presentModes);
  3649. return 0;
  3650. }
  3651. }
  3652. /* If we made it here, all the queries were successful. This does NOT
  3653. * necessarily mean there are any supported formats or present modes!
  3654. */
  3655. return 1;
  3656. }
  3657. static bool VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  3658. VkFormat desiredFormat,
  3659. VkColorSpaceKHR desiredColorSpace,
  3660. VkSurfaceFormatKHR *availableFormats,
  3661. Uint32 availableFormatsLength)
  3662. {
  3663. Uint32 i;
  3664. for (i = 0; i < availableFormatsLength; i += 1) {
  3665. if (availableFormats[i].format == desiredFormat &&
  3666. availableFormats[i].colorSpace == desiredColorSpace) {
  3667. return true;
  3668. }
  3669. }
  3670. return false;
  3671. }
  3672. static bool VULKAN_INTERNAL_VerifySwapPresentMode(
  3673. VkPresentModeKHR presentMode,
  3674. VkPresentModeKHR *availablePresentModes,
  3675. Uint32 availablePresentModesLength)
  3676. {
  3677. Uint32 i;
  3678. for (i = 0; i < availablePresentModesLength; i += 1) {
  3679. if (availablePresentModes[i] == presentMode) {
  3680. return true;
  3681. }
  3682. }
  3683. return false;
  3684. }
  3685. static bool VULKAN_INTERNAL_CreateSwapchain(
  3686. VulkanRenderer *renderer,
  3687. WindowData *windowData)
  3688. {
  3689. VkResult vulkanResult;
  3690. VulkanSwapchainData *swapchainData;
  3691. VkSwapchainCreateInfoKHR swapchainCreateInfo;
  3692. VkImage *swapchainImages;
  3693. VkSemaphoreCreateInfo semaphoreCreateInfo;
  3694. SwapchainSupportDetails swapchainSupportDetails;
  3695. bool hasValidSwapchainComposition, hasValidPresentMode;
  3696. Sint32 drawableWidth, drawableHeight;
  3697. Uint32 i;
  3698. SDL_VideoDevice *_this = SDL_GetVideoDevice();
  3699. SDL_assert(_this && _this->Vulkan_CreateSurface);
  3700. swapchainData = SDL_malloc(sizeof(VulkanSwapchainData));
  3701. swapchainData->frameCounter = 0;
  3702. // Each swapchain must have its own surface.
  3703. if (!_this->Vulkan_CreateSurface(
  3704. _this,
  3705. windowData->window,
  3706. renderer->instance,
  3707. NULL, // FIXME: VAllocationCallbacks
  3708. &swapchainData->surface)) {
  3709. SDL_free(swapchainData);
  3710. SDL_LogError(
  3711. SDL_LOG_CATEGORY_GPU,
  3712. "Vulkan_CreateSurface failed: %s",
  3713. SDL_GetError());
  3714. return false;
  3715. }
  3716. if (!VULKAN_INTERNAL_QuerySwapchainSupport(
  3717. renderer,
  3718. renderer->physicalDevice,
  3719. swapchainData->surface,
  3720. &swapchainSupportDetails)) {
  3721. renderer->vkDestroySurfaceKHR(
  3722. renderer->instance,
  3723. swapchainData->surface,
  3724. NULL);
  3725. if (swapchainSupportDetails.formatsLength > 0) {
  3726. SDL_free(swapchainSupportDetails.formats);
  3727. }
  3728. if (swapchainSupportDetails.presentModesLength > 0) {
  3729. SDL_free(swapchainSupportDetails.presentModes);
  3730. }
  3731. SDL_free(swapchainData);
  3732. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Device does not support swap chain creation");
  3733. return false;
  3734. }
  3735. if (swapchainSupportDetails.capabilities.currentExtent.width == 0 ||
  3736. swapchainSupportDetails.capabilities.currentExtent.height == 0) {
  3737. // Not an error, just minimize behavior!
  3738. renderer->vkDestroySurfaceKHR(
  3739. renderer->instance,
  3740. swapchainData->surface,
  3741. NULL);
  3742. if (swapchainSupportDetails.formatsLength > 0) {
  3743. SDL_free(swapchainSupportDetails.formats);
  3744. }
  3745. if (swapchainSupportDetails.presentModesLength > 0) {
  3746. SDL_free(swapchainSupportDetails.presentModes);
  3747. }
  3748. SDL_free(swapchainData);
  3749. return false;
  3750. }
  3751. // Verify that we can use the requested composition and present mode
  3752. swapchainData->format = SwapchainCompositionToFormat[windowData->swapchainComposition];
  3753. swapchainData->colorSpace = SwapchainCompositionToColorSpace[windowData->swapchainComposition];
  3754. swapchainData->swapchainSwizzle = SwapchainCompositionSwizzle[windowData->swapchainComposition];
  3755. swapchainData->usingFallbackFormat = false;
  3756. hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  3757. swapchainData->format,
  3758. swapchainData->colorSpace,
  3759. swapchainSupportDetails.formats,
  3760. swapchainSupportDetails.formatsLength);
  3761. if (!hasValidSwapchainComposition) {
  3762. // Let's try again with the fallback format...
  3763. swapchainData->format = SwapchainCompositionToFallbackFormat[windowData->swapchainComposition];
  3764. swapchainData->usingFallbackFormat = true;
  3765. hasValidSwapchainComposition = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  3766. swapchainData->format,
  3767. swapchainData->colorSpace,
  3768. swapchainSupportDetails.formats,
  3769. swapchainSupportDetails.formatsLength);
  3770. }
  3771. swapchainData->presentMode = SDLToVK_PresentMode[windowData->presentMode];
  3772. hasValidPresentMode = VULKAN_INTERNAL_VerifySwapPresentMode(
  3773. swapchainData->presentMode,
  3774. swapchainSupportDetails.presentModes,
  3775. swapchainSupportDetails.presentModesLength);
  3776. if (!hasValidSwapchainComposition || !hasValidPresentMode) {
  3777. renderer->vkDestroySurfaceKHR(
  3778. renderer->instance,
  3779. swapchainData->surface,
  3780. NULL);
  3781. if (swapchainSupportDetails.formatsLength > 0) {
  3782. SDL_free(swapchainSupportDetails.formats);
  3783. }
  3784. if (swapchainSupportDetails.presentModesLength > 0) {
  3785. SDL_free(swapchainSupportDetails.presentModes);
  3786. }
  3787. SDL_free(swapchainData);
  3788. if (!hasValidSwapchainComposition) {
  3789. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Device does not support requested swapchain composition!");
  3790. }
  3791. if (!hasValidPresentMode) {
  3792. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Device does not support requested presentMode!");
  3793. }
  3794. return false;
  3795. }
  3796. // Sync now to be sure that our swapchain size is correct
  3797. SDL_SyncWindow(windowData->window);
  3798. SDL_GetWindowSizeInPixels(
  3799. windowData->window,
  3800. &drawableWidth,
  3801. &drawableHeight);
  3802. if (drawableWidth < (Sint32)swapchainSupportDetails.capabilities.minImageExtent.width ||
  3803. drawableWidth > (Sint32)swapchainSupportDetails.capabilities.maxImageExtent.width ||
  3804. drawableHeight < (Sint32)swapchainSupportDetails.capabilities.minImageExtent.height ||
  3805. drawableHeight > (Sint32)swapchainSupportDetails.capabilities.maxImageExtent.height) {
  3806. if (swapchainSupportDetails.capabilities.currentExtent.width != UINT32_MAX) {
  3807. drawableWidth = VULKAN_INTERNAL_clamp(
  3808. drawableWidth,
  3809. (Sint32)swapchainSupportDetails.capabilities.minImageExtent.width,
  3810. (Sint32)swapchainSupportDetails.capabilities.maxImageExtent.width);
  3811. drawableHeight = VULKAN_INTERNAL_clamp(
  3812. drawableHeight,
  3813. (Sint32)swapchainSupportDetails.capabilities.minImageExtent.height,
  3814. (Sint32)swapchainSupportDetails.capabilities.maxImageExtent.height);
  3815. } else {
  3816. renderer->vkDestroySurfaceKHR(
  3817. renderer->instance,
  3818. swapchainData->surface,
  3819. NULL);
  3820. if (swapchainSupportDetails.formatsLength > 0) {
  3821. SDL_free(swapchainSupportDetails.formats);
  3822. }
  3823. if (swapchainSupportDetails.presentModesLength > 0) {
  3824. SDL_free(swapchainSupportDetails.presentModes);
  3825. }
  3826. SDL_free(swapchainData);
  3827. SDL_LogError(SDL_LOG_CATEGORY_GPU, "No fallback swapchain size available!");
  3828. return false;
  3829. }
  3830. }
  3831. swapchainData->imageCount = MAX_FRAMES_IN_FLIGHT;
  3832. if (swapchainSupportDetails.capabilities.maxImageCount > 0 &&
  3833. swapchainData->imageCount > swapchainSupportDetails.capabilities.maxImageCount) {
  3834. swapchainData->imageCount = swapchainSupportDetails.capabilities.maxImageCount;
  3835. }
  3836. if (swapchainData->presentMode == VK_PRESENT_MODE_MAILBOX_KHR) {
  3837. /* Required for proper triple-buffering.
  3838. * Note that this is below the above maxImageCount check!
  3839. * If the driver advertises MAILBOX but does not support 3 swap
  3840. * images, it's not real mailbox support, so let it fail hard.
  3841. * -flibit
  3842. */
  3843. swapchainData->imageCount = SDL_max(swapchainData->imageCount, 3);
  3844. }
  3845. swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
  3846. swapchainCreateInfo.pNext = NULL;
  3847. swapchainCreateInfo.flags = 0;
  3848. swapchainCreateInfo.surface = swapchainData->surface;
  3849. swapchainCreateInfo.minImageCount = swapchainData->imageCount;
  3850. swapchainCreateInfo.imageFormat = swapchainData->format;
  3851. swapchainCreateInfo.imageColorSpace = swapchainData->colorSpace;
  3852. swapchainCreateInfo.imageExtent.width = drawableWidth;
  3853. swapchainCreateInfo.imageExtent.height = drawableHeight;
  3854. swapchainCreateInfo.imageArrayLayers = 1;
  3855. swapchainCreateInfo.imageUsage =
  3856. VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
  3857. VK_IMAGE_USAGE_TRANSFER_DST_BIT;
  3858. swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
  3859. swapchainCreateInfo.queueFamilyIndexCount = 0;
  3860. swapchainCreateInfo.pQueueFamilyIndices = NULL;
  3861. swapchainCreateInfo.preTransform = swapchainSupportDetails.capabilities.currentTransform;
  3862. swapchainCreateInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
  3863. swapchainCreateInfo.presentMode = swapchainData->presentMode;
  3864. swapchainCreateInfo.clipped = VK_TRUE;
  3865. swapchainCreateInfo.oldSwapchain = VK_NULL_HANDLE;
  3866. vulkanResult = renderer->vkCreateSwapchainKHR(
  3867. renderer->logicalDevice,
  3868. &swapchainCreateInfo,
  3869. NULL,
  3870. &swapchainData->swapchain);
  3871. if (swapchainSupportDetails.formatsLength > 0) {
  3872. SDL_free(swapchainSupportDetails.formats);
  3873. }
  3874. if (swapchainSupportDetails.presentModesLength > 0) {
  3875. SDL_free(swapchainSupportDetails.presentModes);
  3876. }
  3877. if (vulkanResult != VK_SUCCESS) {
  3878. renderer->vkDestroySurfaceKHR(
  3879. renderer->instance,
  3880. swapchainData->surface,
  3881. NULL);
  3882. SDL_free(swapchainData);
  3883. LogVulkanResultAsError("vkCreateSwapchainKHR", vulkanResult);
  3884. return false;
  3885. }
  3886. renderer->vkGetSwapchainImagesKHR(
  3887. renderer->logicalDevice,
  3888. swapchainData->swapchain,
  3889. &swapchainData->imageCount,
  3890. NULL);
  3891. swapchainData->textureContainers = SDL_malloc(
  3892. sizeof(VulkanTextureContainer) * swapchainData->imageCount);
  3893. if (!swapchainData->textureContainers) {
  3894. renderer->vkDestroySurfaceKHR(
  3895. renderer->instance,
  3896. swapchainData->surface,
  3897. NULL);
  3898. SDL_free(swapchainData);
  3899. return false;
  3900. }
  3901. swapchainImages = SDL_stack_alloc(VkImage, swapchainData->imageCount);
  3902. renderer->vkGetSwapchainImagesKHR(
  3903. renderer->logicalDevice,
  3904. swapchainData->swapchain,
  3905. &swapchainData->imageCount,
  3906. swapchainImages);
  3907. for (i = 0; i < swapchainData->imageCount; i += 1) {
  3908. // Initialize dummy container
  3909. SDL_zero(swapchainData->textureContainers[i]);
  3910. swapchainData->textureContainers[i].canBeCycled = false;
  3911. swapchainData->textureContainers[i].header.info.width = drawableWidth;
  3912. swapchainData->textureContainers[i].header.info.height = drawableHeight;
  3913. swapchainData->textureContainers[i].header.info.layerCountOrDepth = 1;
  3914. swapchainData->textureContainers[i].header.info.format = SwapchainCompositionToSDLFormat(
  3915. windowData->swapchainComposition,
  3916. swapchainData->usingFallbackFormat);
  3917. swapchainData->textureContainers[i].header.info.type = SDL_GPU_TEXTURETYPE_2D;
  3918. swapchainData->textureContainers[i].header.info.levelCount = 1;
  3919. swapchainData->textureContainers[i].header.info.sampleCount = SDL_GPU_SAMPLECOUNT_1;
  3920. swapchainData->textureContainers[i].header.info.usageFlags = SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT;
  3921. swapchainData->textureContainers[i].activeTextureHandle = SDL_malloc(sizeof(VulkanTextureHandle));
  3922. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture = SDL_malloc(sizeof(VulkanTexture));
  3923. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->image = swapchainImages[i];
  3924. // Swapchain memory is managed by the driver
  3925. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->usedRegion = NULL;
  3926. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->dimensions.width = drawableWidth;
  3927. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->dimensions.height = drawableHeight;
  3928. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->format = swapchainData->format;
  3929. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->swizzle = swapchainData->swapchainSwizzle;
  3930. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->type = SDL_GPU_TEXTURETYPE_2D;
  3931. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->depth = 1;
  3932. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->layerCount = 1;
  3933. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->levelCount = 1;
  3934. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->sampleCount = VK_SAMPLE_COUNT_1_BIT;
  3935. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->usageFlags =
  3936. SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT;
  3937. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
  3938. SDL_AtomicSet(&swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->referenceCount, 0);
  3939. swapchainData->textureContainers[i].activeTextureHandle->container = NULL;
  3940. // Create slice
  3941. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresourceCount = 1;
  3942. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources = SDL_malloc(sizeof(VulkanTextureSubresource));
  3943. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].parent = swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture;
  3944. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].layer = 0;
  3945. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].level = 0;
  3946. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].transitioned = true;
  3947. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].msaaTexHandle = NULL;
  3948. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].renderTargetViews = SDL_malloc(sizeof(VkImageView));
  3949. VULKAN_INTERNAL_CreateRenderTargetView(
  3950. renderer,
  3951. swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture,
  3952. 0,
  3953. 0,
  3954. swapchainData->swapchainSwizzle,
  3955. &swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->subresources[0].renderTargetViews[0]);
  3956. }
  3957. SDL_stack_free(swapchainImages);
  3958. semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
  3959. semaphoreCreateInfo.pNext = NULL;
  3960. semaphoreCreateInfo.flags = 0;
  3961. for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
  3962. renderer->vkCreateSemaphore(
  3963. renderer->logicalDevice,
  3964. &semaphoreCreateInfo,
  3965. NULL,
  3966. &swapchainData->imageAvailableSemaphore[i]);
  3967. renderer->vkCreateSemaphore(
  3968. renderer->logicalDevice,
  3969. &semaphoreCreateInfo,
  3970. NULL,
  3971. &swapchainData->renderFinishedSemaphore[i]);
  3972. swapchainData->inFlightFences[i] = NULL;
  3973. }
  3974. windowData->swapchainData = swapchainData;
  3975. windowData->needsSwapchainRecreate = false;
  3976. return true;
  3977. }
  3978. // Command Buffers
  3979. static void VULKAN_INTERNAL_BeginCommandBuffer(
  3980. VulkanRenderer *renderer,
  3981. VulkanCommandBuffer *commandBuffer)
  3982. {
  3983. VkCommandBufferBeginInfo beginInfo;
  3984. VkResult result;
  3985. beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  3986. beginInfo.pNext = NULL;
  3987. beginInfo.flags = 0;
  3988. beginInfo.pInheritanceInfo = NULL;
  3989. beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  3990. result = renderer->vkBeginCommandBuffer(
  3991. commandBuffer->commandBuffer,
  3992. &beginInfo);
  3993. if (result != VK_SUCCESS) {
  3994. LogVulkanResultAsError("vkBeginCommandBuffer", result);
  3995. }
  3996. }
  3997. static void VULKAN_INTERNAL_EndCommandBuffer(
  3998. VulkanRenderer *renderer,
  3999. VulkanCommandBuffer *commandBuffer)
  4000. {
  4001. VkResult result;
  4002. result = renderer->vkEndCommandBuffer(
  4003. commandBuffer->commandBuffer);
  4004. if (result != VK_SUCCESS) {
  4005. LogVulkanResultAsError("vkEndCommandBuffer", result);
  4006. }
  4007. }
/* Tears down the entire Vulkan device: waits for the GPU to go idle,
 * releases all claimed windows/swapchains, then destroys pooled resources,
 * memory allocations, locks, and finally the VkDevice/VkInstance.
 * Ordering matters: GPU work must be drained (VULKAN_Wait) before any
 * resource owned by in-flight command buffers is freed. */
static void VULKAN_DestroyDevice(
SDL_GPUDevice *device)
{
VulkanRenderer *renderer = (VulkanRenderer *)device->driverData;
VulkanMemorySubAllocator *allocator;
/* Drain the GPU before touching swapchains. */
VULKAN_Wait(device->driverData);
/* Release windows in reverse order; VULKAN_ReleaseWindow mutates the
 * claimedWindows array, hence the backwards iteration. */
for (Sint32 i = renderer->claimedWindowCount - 1; i >= 0; i -= 1) {
VULKAN_ReleaseWindow(device->driverData, renderer->claimedWindows[i]->window);
}
SDL_free(renderer->claimedWindows);
/* Second wait: releasing windows may have submitted more work. */
VULKAN_Wait(device->driverData);
SDL_free(renderer->submittedCommandBuffers);
/* Destroy pooled uniform buffers (buffer + handle + pool entry). */
for (Uint32 i = 0; i < renderer->uniformBufferPoolCount; i += 1) {
VULKAN_INTERNAL_DestroyBuffer(
renderer,
renderer->uniformBufferPool[i]->bufferHandle->vulkanBuffer);
SDL_free(renderer->uniformBufferPool[i]->bufferHandle);
SDL_free(renderer->uniformBufferPool[i]);
}
SDL_free(renderer->uniformBufferPool);
/* Destroy all recycled fences in the fence pool. */
for (Uint32 i = 0; i < renderer->fencePool.availableFenceCount; i += 1) {
renderer->vkDestroyFence(
renderer->logicalDevice,
renderer->fencePool.availableFences[i]->fence,
NULL);
SDL_free(renderer->fencePool.availableFences[i]);
}
SDL_free(renderer->fencePool.availableFences);
SDL_DestroyMutex(renderer->fencePool.lock);
/* Hash tables own their values via their destructors (set up at creation
 * elsewhere in this file) — presumably these release command pools,
 * render passes and framebuffers; confirm against the table setup. */
SDL_DestroyHashTable(renderer->commandPoolHashTable);
SDL_DestroyHashTable(renderer->renderPassHashTable);
SDL_DestroyHashTable(renderer->framebufferHashTable);
/* Free every used region and then every allocation of every memory-type
 * sub-allocator, iterating backwards because removal compacts the arrays. */
for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
allocator = &renderer->memoryAllocator->subAllocators[i];
for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
for (Sint32 k = allocator->allocations[j]->usedRegionCount - 1; k >= 0; k -= 1) {
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
allocator->allocations[j]->usedRegions[k]);
}
VULKAN_INTERNAL_DeallocateMemory(
renderer,
allocator,
j);
}
if (renderer->memoryAllocator->subAllocators[i].allocations != NULL) {
SDL_free(renderer->memoryAllocator->subAllocators[i].allocations);
}
SDL_free(renderer->memoryAllocator->subAllocators[i].sortedFreeRegions);
}
SDL_free(renderer->memoryAllocator);
/* Deferred-destruction queues are empty by now (all work drained);
 * only the arrays themselves remain to be freed. */
SDL_free(renderer->texturesToDestroy);
SDL_free(renderer->buffersToDestroy);
SDL_free(renderer->graphicsPipelinesToDestroy);
SDL_free(renderer->computePipelinesToDestroy);
SDL_free(renderer->shadersToDestroy);
SDL_free(renderer->samplersToDestroy);
SDL_free(renderer->framebuffersToDestroy);
SDL_free(renderer->allocationsToDefrag);
SDL_DestroyMutex(renderer->allocatorLock);
SDL_DestroyMutex(renderer->disposeLock);
SDL_DestroyMutex(renderer->submitLock);
SDL_DestroyMutex(renderer->acquireCommandBufferLock);
SDL_DestroyMutex(renderer->acquireUniformBufferLock);
SDL_DestroyMutex(renderer->renderPassFetchLock);
SDL_DestroyMutex(renderer->framebufferFetchLock);
/* Device before instance, per Vulkan object-lifetime rules. */
renderer->vkDestroyDevice(renderer->logicalDevice, NULL);
renderer->vkDestroyInstance(renderer->instance, NULL);
SDL_free(renderer);
SDL_free(device);
SDL_Vulkan_UnloadLibrary();
}
  4080. static VkDescriptorSet VULKAN_INTERNAL_FetchDescriptorSet(
  4081. VulkanRenderer *renderer,
  4082. VulkanCommandBuffer *vulkanCommandBuffer,
  4083. DescriptorSetPool *descriptorSetPool)
  4084. {
  4085. VkDescriptorSet descriptorSet;
  4086. SDL_LockMutex(descriptorSetPool->lock);
  4087. // If no inactive descriptor sets remain, create a new pool and allocate new inactive sets
  4088. if (descriptorSetPool->inactiveDescriptorSetCount == 0) {
  4089. descriptorSetPool->descriptorPoolCount += 1;
  4090. descriptorSetPool->descriptorPools = SDL_realloc(
  4091. descriptorSetPool->descriptorPools,
  4092. sizeof(VkDescriptorPool) * descriptorSetPool->descriptorPoolCount);
  4093. if (!VULKAN_INTERNAL_CreateDescriptorPool(
  4094. renderer,
  4095. descriptorSetPool->descriptorInfos,
  4096. descriptorSetPool->descriptorInfoCount,
  4097. descriptorSetPool->nextPoolSize,
  4098. &descriptorSetPool->descriptorPools[descriptorSetPool->descriptorPoolCount - 1])) {
  4099. SDL_UnlockMutex(descriptorSetPool->lock);
  4100. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create descriptor pool!");
  4101. return VK_NULL_HANDLE;
  4102. }
  4103. descriptorSetPool->inactiveDescriptorSetCapacity += descriptorSetPool->nextPoolSize;
  4104. descriptorSetPool->inactiveDescriptorSets = SDL_realloc(
  4105. descriptorSetPool->inactiveDescriptorSets,
  4106. sizeof(VkDescriptorSet) * descriptorSetPool->inactiveDescriptorSetCapacity);
  4107. if (!VULKAN_INTERNAL_AllocateDescriptorSets(
  4108. renderer,
  4109. descriptorSetPool->descriptorPools[descriptorSetPool->descriptorPoolCount - 1],
  4110. descriptorSetPool->descriptorSetLayout,
  4111. descriptorSetPool->nextPoolSize,
  4112. descriptorSetPool->inactiveDescriptorSets)) {
  4113. SDL_UnlockMutex(descriptorSetPool->lock);
  4114. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to allocate descriptor sets!");
  4115. return VK_NULL_HANDLE;
  4116. }
  4117. descriptorSetPool->inactiveDescriptorSetCount = descriptorSetPool->nextPoolSize;
  4118. descriptorSetPool->nextPoolSize *= 2;
  4119. }
  4120. descriptorSet = descriptorSetPool->inactiveDescriptorSets[descriptorSetPool->inactiveDescriptorSetCount - 1];
  4121. descriptorSetPool->inactiveDescriptorSetCount -= 1;
  4122. SDL_UnlockMutex(descriptorSetPool->lock);
  4123. if (vulkanCommandBuffer->boundDescriptorSetDataCount == vulkanCommandBuffer->boundDescriptorSetDataCapacity) {
  4124. vulkanCommandBuffer->boundDescriptorSetDataCapacity *= 2;
  4125. vulkanCommandBuffer->boundDescriptorSetDatas = SDL_realloc(
  4126. vulkanCommandBuffer->boundDescriptorSetDatas,
  4127. vulkanCommandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData));
  4128. }
  4129. vulkanCommandBuffer->boundDescriptorSetDatas[vulkanCommandBuffer->boundDescriptorSetDataCount].descriptorSet = descriptorSet;
  4130. vulkanCommandBuffer->boundDescriptorSetDatas[vulkanCommandBuffer->boundDescriptorSetDataCount].descriptorSetPool = descriptorSetPool;
  4131. vulkanCommandBuffer->boundDescriptorSetDataCount += 1;
  4132. return descriptorSet;
  4133. }
/* Flushes dirty descriptor state for the currently bound graphics pipeline.
 * Descriptor set layout convention (see the pipeline resource layout):
 *   set 0 = vertex resources (samplers, storage textures, storage buffers)
 *   set 1 = vertex uniform buffers (dynamic offsets)
 *   set 2 = fragment resources
 *   set 3 = fragment uniform buffers (dynamic offsets)
 * Each needNew* flag gates one section; uniform sections additionally track
 * a needNew*Offsets flag so changing only the draw offset re-binds the same
 * set with new dynamic offsets instead of fetching a fresh set.
 * NOTE(review): VULKAN_INTERNAL_FetchDescriptorSet can return VK_NULL_HANDLE
 * on failure and that is not checked here — confirm intended behavior. */
static void VULKAN_INTERNAL_BindGraphicsDescriptorSets(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer)
{
VulkanGraphicsPipelineResourceLayout *resourceLayout;
VkWriteDescriptorSet *writeDescriptorSets;
VkWriteDescriptorSet *currentWriteDescriptorSet;
DescriptorSetPool *descriptorSetPool;
/* Scratch arrays: must stay alive until vkUpdateDescriptorSets is called,
 * since the write structs only hold pointers into them. */
VkDescriptorBufferInfo bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE];
VkDescriptorImageInfo imageInfos[MAX_TEXTURE_SAMPLERS_PER_STAGE + MAX_STORAGE_TEXTURES_PER_STAGE];
Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE];
Uint32 bufferInfoCount = 0;
Uint32 imageInfoCount = 0;
Uint32 i;
resourceLayout = &commandBuffer->currentGraphicsPipeline->resourceLayout;
/* --- Set 0: vertex samplers / storage textures / storage buffers --- */
if (commandBuffer->needNewVertexResourceDescriptorSet) {
descriptorSetPool = &resourceLayout->descriptorSetPools[0];
commandBuffer->vertexResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
commandBuffer,
descriptorSetPool);
writeDescriptorSets = SDL_stack_alloc(
VkWriteDescriptorSet,
resourceLayout->vertexSamplerCount +
resourceLayout->vertexStorageTextureCount +
resourceLayout->vertexStorageBufferCount);
/* Combined image samplers occupy bindings [0, vertexSamplerCount). */
for (i = 0; i < resourceLayout->vertexSamplerCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = i;
currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pBufferInfo = NULL;
imageInfos[imageInfoCount].sampler = commandBuffer->vertexSamplers[i]->sampler;
imageInfos[imageInfoCount].imageView = commandBuffer->vertexSamplerTextures[i]->fullView;
imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
imageInfoCount += 1;
}
/* Storage textures follow the samplers in the binding order. */
for (i = 0; i < resourceLayout->vertexStorageTextureCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[resourceLayout->vertexSamplerCount + i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + i;
currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pBufferInfo = NULL;
imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
imageInfos[imageInfoCount].imageView = commandBuffer->vertexStorageTextures[i]->fullView;
imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
imageInfoCount += 1;
}
/* Storage buffers come last; full-range bindings at offset 0. */
for (i = 0; i < resourceLayout->vertexStorageBufferCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[resourceLayout->vertexSamplerCount + resourceLayout->vertexStorageTextureCount + i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = resourceLayout->vertexSamplerCount + resourceLayout->vertexStorageTextureCount + i;
currentWriteDescriptorSet->dstSet = commandBuffer->vertexResourceDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pImageInfo = NULL;
bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexStorageBuffers[i]->buffer;
bufferInfos[bufferInfoCount].offset = 0;
bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;
currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
bufferInfoCount += 1;
}
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
resourceLayout->vertexSamplerCount + resourceLayout->vertexStorageTextureCount + resourceLayout->vertexStorageBufferCount,
writeDescriptorSets,
0,
NULL);
renderer->vkCmdBindDescriptorSets(
commandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
resourceLayout->pipelineLayout,
0,
1,
&commandBuffer->vertexResourceDescriptorSet,
0,
NULL);
SDL_stack_free(writeDescriptorSets);
/* Reset scratch counters for the next section. */
bufferInfoCount = 0;
imageInfoCount = 0;
commandBuffer->needNewVertexResourceDescriptorSet = false;
}
/* --- Set 1: vertex uniform buffers (dynamic) --- */
if (commandBuffer->needNewVertexUniformDescriptorSet) {
descriptorSetPool = &resourceLayout->descriptorSetPools[1];
commandBuffer->vertexUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
commandBuffer,
descriptorSetPool);
writeDescriptorSets = SDL_stack_alloc(
VkWriteDescriptorSet,
resourceLayout->vertexUniformBufferCount);
for (i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = i;
currentWriteDescriptorSet->dstSet = commandBuffer->vertexUniformDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pImageInfo = NULL;
/* Fixed range; the actual window is selected via the dynamic offset. */
bufferInfos[bufferInfoCount].buffer = commandBuffer->vertexUniformBuffers[i]->bufferHandle->vulkanBuffer->buffer;
bufferInfos[bufferInfoCount].offset = 0;
bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;
currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
bufferInfoCount += 1;
}
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
resourceLayout->vertexUniformBufferCount,
writeDescriptorSets,
0,
NULL);
SDL_stack_free(writeDescriptorSets);
bufferInfoCount = 0;
imageInfoCount = 0;
commandBuffer->needNewVertexUniformDescriptorSet = false;
/* A fresh set always needs its offsets (re)bound below. */
commandBuffer->needNewVertexUniformOffsets = true;
}
if (commandBuffer->needNewVertexUniformOffsets) {
for (i = 0; i < resourceLayout->vertexUniformBufferCount; i += 1) {
dynamicOffsets[i] = commandBuffer->vertexUniformBuffers[i]->drawOffset;
}
renderer->vkCmdBindDescriptorSets(
commandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
resourceLayout->pipelineLayout,
1,
1,
&commandBuffer->vertexUniformDescriptorSet,
resourceLayout->vertexUniformBufferCount,
dynamicOffsets);
commandBuffer->needNewVertexUniformOffsets = false;
}
/* --- Set 2: fragment samplers / storage textures / storage buffers --- */
if (commandBuffer->needNewFragmentResourceDescriptorSet) {
descriptorSetPool = &resourceLayout->descriptorSetPools[2];
commandBuffer->fragmentResourceDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
commandBuffer,
descriptorSetPool);
writeDescriptorSets = SDL_stack_alloc(
VkWriteDescriptorSet,
resourceLayout->fragmentSamplerCount +
resourceLayout->fragmentStorageTextureCount +
resourceLayout->fragmentStorageBufferCount);
for (i = 0; i < resourceLayout->fragmentSamplerCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = i;
currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pBufferInfo = NULL;
imageInfos[imageInfoCount].sampler = commandBuffer->fragmentSamplers[i]->sampler;
imageInfos[imageInfoCount].imageView = commandBuffer->fragmentSamplerTextures[i]->fullView;
imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
imageInfoCount += 1;
}
for (i = 0; i < resourceLayout->fragmentStorageTextureCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[resourceLayout->fragmentSamplerCount + i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + i;
currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pBufferInfo = NULL;
imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
imageInfos[imageInfoCount].imageView = commandBuffer->fragmentStorageTextures[i]->fullView;
imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];
imageInfoCount += 1;
}
for (i = 0; i < resourceLayout->fragmentStorageBufferCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[resourceLayout->fragmentSamplerCount + resourceLayout->fragmentStorageTextureCount + i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = resourceLayout->fragmentSamplerCount + resourceLayout->fragmentStorageTextureCount + i;
currentWriteDescriptorSet->dstSet = commandBuffer->fragmentResourceDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pImageInfo = NULL;
bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentStorageBuffers[i]->buffer;
bufferInfos[bufferInfoCount].offset = 0;
bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;
currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
bufferInfoCount += 1;
}
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
resourceLayout->fragmentSamplerCount + resourceLayout->fragmentStorageTextureCount + resourceLayout->fragmentStorageBufferCount,
writeDescriptorSets,
0,
NULL);
renderer->vkCmdBindDescriptorSets(
commandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
resourceLayout->pipelineLayout,
2,
1,
&commandBuffer->fragmentResourceDescriptorSet,
0,
NULL);
SDL_stack_free(writeDescriptorSets);
bufferInfoCount = 0;
imageInfoCount = 0;
commandBuffer->needNewFragmentResourceDescriptorSet = false;
}
/* --- Set 3: fragment uniform buffers (dynamic) --- */
if (commandBuffer->needNewFragmentUniformDescriptorSet) {
descriptorSetPool = &resourceLayout->descriptorSetPools[3];
commandBuffer->fragmentUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
commandBuffer,
descriptorSetPool);
writeDescriptorSets = SDL_stack_alloc(
VkWriteDescriptorSet,
resourceLayout->fragmentUniformBufferCount);
for (i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) {
currentWriteDescriptorSet = &writeDescriptorSets[i];
currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
currentWriteDescriptorSet->pNext = NULL;
currentWriteDescriptorSet->descriptorCount = 1;
currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
currentWriteDescriptorSet->dstArrayElement = 0;
currentWriteDescriptorSet->dstBinding = i;
currentWriteDescriptorSet->dstSet = commandBuffer->fragmentUniformDescriptorSet;
currentWriteDescriptorSet->pTexelBufferView = NULL;
currentWriteDescriptorSet->pImageInfo = NULL;
bufferInfos[bufferInfoCount].buffer = commandBuffer->fragmentUniformBuffers[i]->bufferHandle->vulkanBuffer->buffer;
bufferInfos[bufferInfoCount].offset = 0;
bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;
currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];
bufferInfoCount += 1;
}
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
resourceLayout->fragmentUniformBufferCount,
writeDescriptorSets,
0,
NULL);
SDL_stack_free(writeDescriptorSets);
bufferInfoCount = 0;
imageInfoCount = 0;
commandBuffer->needNewFragmentUniformDescriptorSet = false;
commandBuffer->needNewFragmentUniformOffsets = true;
}
if (commandBuffer->needNewFragmentUniformOffsets) {
for (i = 0; i < resourceLayout->fragmentUniformBufferCount; i += 1) {
dynamicOffsets[i] = commandBuffer->fragmentUniformBuffers[i]->drawOffset;
}
renderer->vkCmdBindDescriptorSets(
commandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
resourceLayout->pipelineLayout,
3,
1,
&commandBuffer->fragmentUniformDescriptorSet,
resourceLayout->fragmentUniformBufferCount,
dynamicOffsets);
commandBuffer->needNewFragmentUniformOffsets = false;
}
}
  4420. static void VULKAN_DrawIndexedPrimitives(
  4421. SDL_GPUCommandBuffer *commandBuffer,
  4422. Uint32 indexCount,
  4423. Uint32 instanceCount,
  4424. Uint32 firstIndex,
  4425. Sint32 vertexOffset,
  4426. Uint32 firstInstance)
  4427. {
  4428. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4429. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  4430. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4431. renderer->vkCmdDrawIndexed(
  4432. vulkanCommandBuffer->commandBuffer,
  4433. indexCount,
  4434. instanceCount,
  4435. firstIndex,
  4436. vertexOffset,
  4437. firstInstance);
  4438. }
  4439. static void VULKAN_DrawPrimitives(
  4440. SDL_GPUCommandBuffer *commandBuffer,
  4441. Uint32 vertexCount,
  4442. Uint32 instanceCount,
  4443. Uint32 firstVertex,
  4444. Uint32 firstInstance)
  4445. {
  4446. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4447. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  4448. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4449. renderer->vkCmdDraw(
  4450. vulkanCommandBuffer->commandBuffer,
  4451. vertexCount,
  4452. instanceCount,
  4453. firstVertex,
  4454. firstInstance);
  4455. }
  4456. static void VULKAN_DrawPrimitivesIndirect(
  4457. SDL_GPUCommandBuffer *commandBuffer,
  4458. SDL_GPUBuffer *buffer,
  4459. Uint32 offsetInBytes,
  4460. Uint32 drawCount,
  4461. Uint32 stride)
  4462. {
  4463. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4464. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  4465. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBufferHandle->vulkanBuffer;
  4466. Uint32 i;
  4467. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4468. if (renderer->supportsMultiDrawIndirect) {
  4469. // Real multi-draw!
  4470. renderer->vkCmdDrawIndirect(
  4471. vulkanCommandBuffer->commandBuffer,
  4472. vulkanBuffer->buffer,
  4473. offsetInBytes,
  4474. drawCount,
  4475. stride);
  4476. } else {
  4477. // Fake multi-draw...
  4478. for (i = 0; i < drawCount; i += 1) {
  4479. renderer->vkCmdDrawIndirect(
  4480. vulkanCommandBuffer->commandBuffer,
  4481. vulkanBuffer->buffer,
  4482. offsetInBytes + (stride * i),
  4483. 1,
  4484. stride);
  4485. }
  4486. }
  4487. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  4488. }
  4489. static void VULKAN_DrawIndexedPrimitivesIndirect(
  4490. SDL_GPUCommandBuffer *commandBuffer,
  4491. SDL_GPUBuffer *buffer,
  4492. Uint32 offsetInBytes,
  4493. Uint32 drawCount,
  4494. Uint32 stride)
  4495. {
  4496. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4497. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  4498. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBufferHandle->vulkanBuffer;
  4499. Uint32 i;
  4500. VULKAN_INTERNAL_BindGraphicsDescriptorSets(renderer, vulkanCommandBuffer);
  4501. if (renderer->supportsMultiDrawIndirect) {
  4502. // Real multi-draw!
  4503. renderer->vkCmdDrawIndexedIndirect(
  4504. vulkanCommandBuffer->commandBuffer,
  4505. vulkanBuffer->buffer,
  4506. offsetInBytes,
  4507. drawCount,
  4508. stride);
  4509. } else {
  4510. // Fake multi-draw...
  4511. for (i = 0; i < drawCount; i += 1) {
  4512. renderer->vkCmdDrawIndexedIndirect(
  4513. vulkanCommandBuffer->commandBuffer,
  4514. vulkanBuffer->buffer,
  4515. offsetInBytes + (stride * i),
  4516. 1,
  4517. stride);
  4518. }
  4519. }
  4520. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  4521. }
  4522. // Debug Naming
  4523. static void VULKAN_INTERNAL_SetBufferName(
  4524. VulkanRenderer *renderer,
  4525. VulkanBuffer *buffer,
  4526. const char *text)
  4527. {
  4528. VkDebugUtilsObjectNameInfoEXT nameInfo;
  4529. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4530. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  4531. nameInfo.pNext = NULL;
  4532. nameInfo.pObjectName = text;
  4533. nameInfo.objectType = VK_OBJECT_TYPE_BUFFER;
  4534. nameInfo.objectHandle = (uint64_t)buffer->buffer;
  4535. renderer->vkSetDebugUtilsObjectNameEXT(
  4536. renderer->logicalDevice,
  4537. &nameInfo);
  4538. }
  4539. }
  4540. static void VULKAN_SetBufferName(
  4541. SDL_GPURenderer *driverData,
  4542. SDL_GPUBuffer *buffer,
  4543. const char *text)
  4544. {
  4545. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  4546. VulkanBufferContainer *container = (VulkanBufferContainer *)buffer;
  4547. size_t textLength = SDL_strlen(text) + 1;
  4548. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4549. container->debugName = SDL_realloc(
  4550. container->debugName,
  4551. textLength);
  4552. SDL_utf8strlcpy(
  4553. container->debugName,
  4554. text,
  4555. textLength);
  4556. for (Uint32 i = 0; i < container->bufferCount; i += 1) {
  4557. VULKAN_INTERNAL_SetBufferName(
  4558. renderer,
  4559. container->bufferHandles[i]->vulkanBuffer,
  4560. text);
  4561. }
  4562. }
  4563. }
  4564. static void VULKAN_INTERNAL_SetTextureName(
  4565. VulkanRenderer *renderer,
  4566. VulkanTexture *texture,
  4567. const char *text)
  4568. {
  4569. VkDebugUtilsObjectNameInfoEXT nameInfo;
  4570. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4571. nameInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_OBJECT_NAME_INFO_EXT;
  4572. nameInfo.pNext = NULL;
  4573. nameInfo.pObjectName = text;
  4574. nameInfo.objectType = VK_OBJECT_TYPE_IMAGE;
  4575. nameInfo.objectHandle = (uint64_t)texture->image;
  4576. renderer->vkSetDebugUtilsObjectNameEXT(
  4577. renderer->logicalDevice,
  4578. &nameInfo);
  4579. }
  4580. }
  4581. static void VULKAN_SetTextureName(
  4582. SDL_GPURenderer *driverData,
  4583. SDL_GPUTexture *texture,
  4584. const char *text)
  4585. {
  4586. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  4587. VulkanTextureContainer *container = (VulkanTextureContainer *)texture;
  4588. size_t textLength = SDL_strlen(text) + 1;
  4589. if (renderer->debugMode && renderer->supportsDebugUtils) {
  4590. container->debugName = SDL_realloc(
  4591. container->debugName,
  4592. textLength);
  4593. SDL_utf8strlcpy(
  4594. container->debugName,
  4595. text,
  4596. textLength);
  4597. for (Uint32 i = 0; i < container->textureCount; i += 1) {
  4598. VULKAN_INTERNAL_SetTextureName(
  4599. renderer,
  4600. container->textureHandles[i]->vulkanTexture,
  4601. text);
  4602. }
  4603. }
  4604. }
  4605. static void VULKAN_InsertDebugLabel(
  4606. SDL_GPUCommandBuffer *commandBuffer,
  4607. const char *text)
  4608. {
  4609. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4610. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  4611. VkDebugUtilsLabelEXT labelInfo;
  4612. if (renderer->supportsDebugUtils) {
  4613. labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  4614. labelInfo.pNext = NULL;
  4615. labelInfo.pLabelName = text;
  4616. renderer->vkCmdInsertDebugUtilsLabelEXT(
  4617. vulkanCommandBuffer->commandBuffer,
  4618. &labelInfo);
  4619. }
  4620. }
  4621. static void VULKAN_PushDebugGroup(
  4622. SDL_GPUCommandBuffer *commandBuffer,
  4623. const char *name)
  4624. {
  4625. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4626. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  4627. VkDebugUtilsLabelEXT labelInfo;
  4628. if (renderer->supportsDebugUtils) {
  4629. labelInfo.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT;
  4630. labelInfo.pNext = NULL;
  4631. labelInfo.pLabelName = name;
  4632. renderer->vkCmdBeginDebugUtilsLabelEXT(
  4633. vulkanCommandBuffer->commandBuffer,
  4634. &labelInfo);
  4635. }
  4636. }
  4637. static void VULKAN_PopDebugGroup(
  4638. SDL_GPUCommandBuffer *commandBuffer)
  4639. {
  4640. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  4641. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  4642. if (renderer->supportsDebugUtils) {
  4643. renderer->vkCmdEndDebugUtilsLabelEXT(vulkanCommandBuffer->commandBuffer);
  4644. }
  4645. }
  4646. static VulkanTextureHandle *VULKAN_INTERNAL_CreateTextureHandle(
  4647. VulkanRenderer *renderer,
  4648. Uint32 width,
  4649. Uint32 height,
  4650. Uint32 depth,
  4651. SDL_GPUTextureType type,
  4652. Uint32 layerCount,
  4653. Uint32 levelCount,
  4654. VkSampleCountFlagBits sampleCount,
  4655. VkFormat format,
  4656. VkComponentMapping swizzle,
  4657. VkImageAspectFlags aspectMask,
  4658. SDL_GPUTextureUsageFlags textureUsageFlags,
  4659. bool isMSAAColorTarget)
  4660. {
  4661. VulkanTextureHandle *textureHandle;
  4662. VulkanTexture *texture;
  4663. texture = VULKAN_INTERNAL_CreateTexture(
  4664. renderer,
  4665. width,
  4666. height,
  4667. depth,
  4668. type,
  4669. layerCount,
  4670. levelCount,
  4671. sampleCount,
  4672. format,
  4673. swizzle,
  4674. aspectMask,
  4675. textureUsageFlags,
  4676. isMSAAColorTarget);
  4677. if (texture == NULL) {
  4678. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create texture!");
  4679. return NULL;
  4680. }
  4681. textureHandle = SDL_malloc(sizeof(VulkanTextureHandle));
  4682. textureHandle->vulkanTexture = texture;
  4683. textureHandle->container = NULL;
  4684. texture->handle = textureHandle;
  4685. return textureHandle;
  4686. }
// Creates a VulkanTexture: allocates the VkImage, binds device memory for it,
// optionally creates a "full" image view covering all layers/levels (when the
// texture can be sampled or storage-read), and pre-creates per-subresource
// views (render target / compute write / depth-stencil) according to the
// usage flags. For multisampled, non-depth render targets, each subresource
// additionally gets an implicit single-layer MSAA texture (msaaTexHandle)
// created via VULKAN_INTERNAL_CreateTextureHandle.
//
// Returns the new texture, or NULL on failure.
//
// NOTE(review): the SDL_malloc calls below are not checked for NULL, and the
// vkCreateImageView failure path returns NULL without destroying the VkImage,
// releasing the bound memory region, or freeing `texture` — TODO: add cleanup.
static VulkanTexture *VULKAN_INTERNAL_CreateTexture(
    VulkanRenderer *renderer,
    Uint32 width,
    Uint32 height,
    Uint32 depth,
    SDL_GPUTextureType type,
    Uint32 layerCount,
    Uint32 levelCount,
    VkSampleCountFlagBits sampleCount,
    VkFormat format,
    VkComponentMapping swizzle,
    VkImageAspectFlags aspectMask,
    SDL_GPUTextureUsageFlags textureUsageFlags,
    bool isMSAAColorTarget)
{
    VkResult vulkanResult;
    VkImageCreateInfo imageCreateInfo;
    VkImageCreateFlags imageCreateFlags = 0;
    VkImageViewCreateInfo imageViewCreateInfo;
    Uint8 bindResult;
    Uint8 isRenderTarget =
        ((textureUsageFlags & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT) != 0) ||
        ((textureUsageFlags & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT) != 0);
    // Every texture can be a transfer source and destination.
    VkImageUsageFlags vkUsageFlags = VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    VulkanTexture *texture = SDL_malloc(sizeof(VulkanTexture));

    texture->type = type;
    texture->isMSAAColorTarget = isMSAAColorTarget;
    texture->markedForDestroy = 0;

    if (type == SDL_GPU_TEXTURETYPE_CUBE) {
        imageCreateFlags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
    } else if (type == SDL_GPU_TEXTURETYPE_3D) {
        // Lets 2D slices of the 3D image be used as render-target views.
        imageCreateFlags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
    }

    // Translate SDL usage flags into Vulkan image usage bits.
    if (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_SAMPLER_BIT) {
        vkUsageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
    }
    if (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT) {
        vkUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }
    if (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT) {
        vkUsageFlags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    }
    if (textureUsageFlags & (SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ_BIT |
                             SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ_BIT |
                             SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE_BIT)) {
        vkUsageFlags |= VK_IMAGE_USAGE_STORAGE_BIT;
    }

    imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    imageCreateInfo.pNext = NULL;
    imageCreateInfo.flags = imageCreateFlags;
    imageCreateInfo.imageType = type == SDL_GPU_TEXTURETYPE_3D ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D;
    imageCreateInfo.format = format;
    imageCreateInfo.extent.width = width;
    imageCreateInfo.extent.height = height;
    imageCreateInfo.extent.depth = depth;
    imageCreateInfo.mipLevels = levelCount;
    imageCreateInfo.arrayLayers = layerCount;
    // The base image is single-sampled unless this *is* the dedicated MSAA
    // color target or the format is a depth format — presumably because color
    // MSAA is handled by the per-subresource msaaTexHandle below; confirm.
    imageCreateInfo.samples = isMSAAColorTarget || VULKAN_INTERNAL_IsVulkanDepthFormat(format) ? sampleCount : VK_SAMPLE_COUNT_1_BIT;
    imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
    imageCreateInfo.usage = vkUsageFlags;
    imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    imageCreateInfo.queueFamilyIndexCount = 0;
    imageCreateInfo.pQueueFamilyIndices = NULL;
    imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

    vulkanResult = renderer->vkCreateImage(
        renderer->logicalDevice,
        &imageCreateInfo,
        NULL,
        &texture->image);
    VULKAN_ERROR_CHECK(vulkanResult, vkCreateImage, 0)

    bindResult = VULKAN_INTERNAL_BindMemoryForImage(
        renderer,
        texture->image,
        &texture->usedRegion);

    if (bindResult != 1) {
        renderer->vkDestroyImage(
            renderer->logicalDevice,
            texture->image,
            NULL);

        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unable to bind memory for texture!");
        // NOTE(review): `texture` itself is leaked on this path — TODO.
        return NULL;
    }

    // Back-reference from the memory region to its owning texture.
    texture->usedRegion->vulkanTexture = texture;

    texture->fullView = VK_NULL_HANDLE;

    // A "full" view spanning all layers and levels is only needed when the
    // texture can be sampled or storage-read.
    if (
        (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_SAMPLER_BIT) ||
        (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ_BIT) ||
        (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ_BIT)) {

        imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
        imageViewCreateInfo.pNext = NULL;
        imageViewCreateInfo.flags = 0;
        imageViewCreateInfo.image = texture->image;
        imageViewCreateInfo.format = format;
        imageViewCreateInfo.components = swizzle;
        imageViewCreateInfo.subresourceRange.aspectMask = aspectMask;
        imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
        imageViewCreateInfo.subresourceRange.levelCount = levelCount;
        imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
        imageViewCreateInfo.subresourceRange.layerCount = layerCount;

        if (type == SDL_GPU_TEXTURETYPE_CUBE) {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
        } else if (type == SDL_GPU_TEXTURETYPE_3D) {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_3D;
        } else if (type == SDL_GPU_TEXTURETYPE_2D_ARRAY) {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        } else {
            imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
        }

        vulkanResult = renderer->vkCreateImageView(
            renderer->logicalDevice,
            &imageViewCreateInfo,
            NULL,
            &texture->fullView);

        if (vulkanResult != VK_SUCCESS) {
            LogVulkanResultAsError("vkCreateImageView", vulkanResult);
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create texture image view");
            // NOTE(review): image, bound memory region, and `texture` are all
            // leaked on this path — TODO: add cleanup.
            return NULL;
        }
    }

    texture->dimensions.width = width;
    texture->dimensions.height = height;
    texture->depth = depth;
    texture->format = format;
    texture->swizzle = swizzle;
    texture->levelCount = levelCount;
    texture->layerCount = layerCount;
    texture->sampleCount = sampleCount;
    texture->usageFlags = textureUsageFlags;
    texture->aspectFlags = aspectMask;
    SDL_AtomicSet(&texture->referenceCount, 0);

    // Define slices: one subresource per (layer, level) pair.
    texture->subresourceCount =
        texture->layerCount *
        texture->levelCount;

    texture->subresources = SDL_malloc(
        texture->subresourceCount * sizeof(VulkanTextureSubresource));

    for (Uint32 i = 0; i < texture->layerCount; i += 1) {
        for (Uint32 j = 0; j < texture->levelCount; j += 1) {
            Uint32 subresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
                j,
                i,
                texture->levelCount);

            texture->subresources[subresourceIndex].renderTargetViews = NULL;
            texture->subresources[subresourceIndex].computeWriteView = VK_NULL_HANDLE;
            texture->subresources[subresourceIndex].depthStencilView = VK_NULL_HANDLE;

            if (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT) {
                // 3D textures (depth > 1) get one render-target view per depth
                // slice; otherwise a single view for this layer/level.
                texture->subresources[subresourceIndex].renderTargetViews = SDL_malloc(
                    texture->depth * sizeof(VkImageView));

                if (texture->depth > 1) {
                    for (Uint32 k = 0; k < texture->depth; k += 1) {
                        VULKAN_INTERNAL_CreateRenderTargetView(
                            renderer,
                            texture,
                            k,
                            j,
                            swizzle,
                            &texture->subresources[subresourceIndex].renderTargetViews[k]);
                    }
                } else {
                    VULKAN_INTERNAL_CreateRenderTargetView(
                        renderer,
                        texture,
                        i,
                        j,
                        swizzle,
                        &texture->subresources[subresourceIndex].renderTargetViews[0]);
                }
            }

            if (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE_BIT) {
                VULKAN_INTERNAL_CreateSubresourceView(
                    renderer,
                    texture,
                    i,
                    j,
                    swizzle,
                    &texture->subresources[subresourceIndex].computeWriteView);
            }

            if (textureUsageFlags & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT) {
                VULKAN_INTERNAL_CreateSubresourceView(
                    renderer,
                    texture,
                    i,
                    j,
                    swizzle,
                    &texture->subresources[subresourceIndex].depthStencilView);
            }

            texture->subresources[subresourceIndex].parent = texture;
            texture->subresources[subresourceIndex].layer = i;
            texture->subresources[subresourceIndex].level = j;
            texture->subresources[subresourceIndex].msaaTexHandle = NULL;
            texture->subresources[subresourceIndex].transitioned = false;

            // Multisampled, non-depth render targets get a dedicated
            // single-layer MSAA texture per subresource, sized for this mip
            // level (width/height >> j). The `0` type argument is presumably
            // SDL_GPU_TEXTURETYPE_2D — confirm against the enum definition.
            if (
                sampleCount > VK_SAMPLE_COUNT_1_BIT &&
                isRenderTarget &&
                !isMSAAColorTarget &&
                !VULKAN_INTERNAL_IsVulkanDepthFormat(texture->format)) {
                texture->subresources[subresourceIndex].msaaTexHandle = VULKAN_INTERNAL_CreateTextureHandle(
                    renderer,
                    texture->dimensions.width >> j,
                    texture->dimensions.height >> j,
                    1,
                    0,
                    1,
                    1,
                    sampleCount,
                    texture->format,
                    texture->swizzle,
                    aspectMask,
                    SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT,
                    true);
            }
        }
    }

    return texture;
}
  4902. static void VULKAN_INTERNAL_CycleActiveBuffer(
  4903. VulkanRenderer *renderer,
  4904. VulkanBufferContainer *bufferContainer)
  4905. {
  4906. VulkanBufferHandle *bufferHandle;
  4907. Uint32 i;
  4908. // If a previously-cycled buffer is available, we can use that.
  4909. for (i = 0; i < bufferContainer->bufferCount; i += 1) {
  4910. bufferHandle = bufferContainer->bufferHandles[i];
  4911. if (SDL_AtomicGet(&bufferHandle->vulkanBuffer->referenceCount) == 0) {
  4912. bufferContainer->activeBufferHandle = bufferHandle;
  4913. return;
  4914. }
  4915. }
  4916. // No buffer handle is available, generate a new one.
  4917. bufferContainer->activeBufferHandle = VULKAN_INTERNAL_CreateBufferHandle(
  4918. renderer,
  4919. bufferContainer->activeBufferHandle->vulkanBuffer->size,
  4920. bufferContainer->activeBufferHandle->vulkanBuffer->usageFlags,
  4921. bufferContainer->activeBufferHandle->vulkanBuffer->type);
  4922. bufferContainer->activeBufferHandle->container = bufferContainer;
  4923. EXPAND_ARRAY_IF_NEEDED(
  4924. bufferContainer->bufferHandles,
  4925. VulkanBufferHandle *,
  4926. bufferContainer->bufferCount + 1,
  4927. bufferContainer->bufferCapacity,
  4928. bufferContainer->bufferCapacity * 2);
  4929. bufferContainer->bufferHandles[bufferContainer->bufferCount] = bufferContainer->activeBufferHandle;
  4930. bufferContainer->bufferCount += 1;
  4931. if (
  4932. renderer->debugMode &&
  4933. renderer->supportsDebugUtils &&
  4934. bufferContainer->debugName != NULL) {
  4935. VULKAN_INTERNAL_SetBufferName(
  4936. renderer,
  4937. bufferContainer->activeBufferHandle->vulkanBuffer,
  4938. bufferContainer->debugName);
  4939. }
  4940. }
  4941. static void VULKAN_INTERNAL_CycleActiveTexture(
  4942. VulkanRenderer *renderer,
  4943. VulkanTextureContainer *textureContainer)
  4944. {
  4945. // If a previously-cycled texture is available, we can use that.
  4946. for (Uint32 i = 0; i < textureContainer->textureCount; i += 1) {
  4947. VulkanTextureHandle *textureHandle = textureContainer->textureHandles[i];
  4948. if (SDL_AtomicGet(&textureHandle->vulkanTexture->referenceCount) == 0) {
  4949. textureContainer->activeTextureHandle = textureHandle;
  4950. return;
  4951. }
  4952. }
  4953. // No texture handle is available, generate a new one.
  4954. textureContainer->activeTextureHandle = VULKAN_INTERNAL_CreateTextureHandle(
  4955. renderer,
  4956. textureContainer->activeTextureHandle->vulkanTexture->dimensions.width,
  4957. textureContainer->activeTextureHandle->vulkanTexture->dimensions.height,
  4958. textureContainer->activeTextureHandle->vulkanTexture->depth,
  4959. textureContainer->activeTextureHandle->vulkanTexture->type,
  4960. textureContainer->activeTextureHandle->vulkanTexture->layerCount,
  4961. textureContainer->activeTextureHandle->vulkanTexture->levelCount,
  4962. textureContainer->activeTextureHandle->vulkanTexture->sampleCount,
  4963. textureContainer->activeTextureHandle->vulkanTexture->format,
  4964. textureContainer->activeTextureHandle->vulkanTexture->swizzle,
  4965. textureContainer->activeTextureHandle->vulkanTexture->aspectFlags,
  4966. textureContainer->activeTextureHandle->vulkanTexture->usageFlags,
  4967. false);
  4968. textureContainer->activeTextureHandle->container = textureContainer;
  4969. EXPAND_ARRAY_IF_NEEDED(
  4970. textureContainer->textureHandles,
  4971. VulkanTextureHandle *,
  4972. textureContainer->textureCount + 1,
  4973. textureContainer->textureCapacity,
  4974. textureContainer->textureCapacity * 2);
  4975. textureContainer->textureHandles[textureContainer->textureCount] = textureContainer->activeTextureHandle;
  4976. textureContainer->textureCount += 1;
  4977. if (
  4978. renderer->debugMode &&
  4979. renderer->supportsDebugUtils &&
  4980. textureContainer->debugName != NULL) {
  4981. VULKAN_INTERNAL_SetTextureName(
  4982. renderer,
  4983. textureContainer->activeTextureHandle->vulkanTexture,
  4984. textureContainer->debugName);
  4985. }
  4986. }
  4987. static VulkanBuffer *VULKAN_INTERNAL_PrepareBufferForWrite(
  4988. VulkanRenderer *renderer,
  4989. VulkanCommandBuffer *commandBuffer,
  4990. VulkanBufferContainer *bufferContainer,
  4991. bool cycle,
  4992. VulkanBufferUsageMode destinationUsageMode)
  4993. {
  4994. if (
  4995. cycle &&
  4996. SDL_AtomicGet(&bufferContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0) {
  4997. VULKAN_INTERNAL_CycleActiveBuffer(
  4998. renderer,
  4999. bufferContainer);
  5000. }
  5001. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  5002. renderer,
  5003. commandBuffer,
  5004. destinationUsageMode,
  5005. bufferContainer->activeBufferHandle->vulkanBuffer);
  5006. return bufferContainer->activeBufferHandle->vulkanBuffer;
  5007. }
  5008. static VulkanTextureSubresource *VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
  5009. VulkanRenderer *renderer,
  5010. VulkanCommandBuffer *commandBuffer,
  5011. VulkanTextureContainer *textureContainer,
  5012. Uint32 layer,
  5013. Uint32 level,
  5014. bool cycle,
  5015. VulkanTextureUsageMode destinationUsageMode)
  5016. {
  5017. VulkanTextureSubresource *textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  5018. textureContainer,
  5019. layer,
  5020. level);
  5021. if (
  5022. cycle &&
  5023. textureContainer->canBeCycled &&
  5024. SDL_AtomicGet(&textureContainer->activeTextureHandle->vulkanTexture->referenceCount) > 0) {
  5025. VULKAN_INTERNAL_CycleActiveTexture(
  5026. renderer,
  5027. textureContainer);
  5028. textureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
  5029. textureContainer,
  5030. layer,
  5031. level);
  5032. }
  5033. // always do barrier because of layout transitions
  5034. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  5035. renderer,
  5036. commandBuffer,
  5037. destinationUsageMode,
  5038. textureSubresource);
  5039. return textureSubresource;
  5040. }
  5041. static VkRenderPass VULKAN_INTERNAL_CreateRenderPass(
  5042. VulkanRenderer *renderer,
  5043. VulkanCommandBuffer *commandBuffer,
  5044. SDL_GPUColorAttachmentInfo *colorAttachmentInfos,
  5045. Uint32 colorAttachmentCount,
  5046. SDL_GPUDepthStencilAttachmentInfo *depthStencilAttachmentInfo)
  5047. {
  5048. VkResult vulkanResult;
  5049. VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
  5050. VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
  5051. VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
  5052. VkAttachmentReference depthStencilAttachmentReference;
  5053. VkRenderPassCreateInfo renderPassCreateInfo;
  5054. VkSubpassDescription subpass;
  5055. VkRenderPass renderPass;
  5056. Uint32 i;
  5057. Uint32 attachmentDescriptionCount = 0;
  5058. Uint32 colorAttachmentReferenceCount = 0;
  5059. Uint32 resolveReferenceCount = 0;
  5060. VulkanTexture *texture = NULL;
  5061. for (i = 0; i < colorAttachmentCount; i += 1) {
  5062. texture = ((VulkanTextureContainer *)colorAttachmentInfos[i].texture)->activeTextureHandle->vulkanTexture;
  5063. if (texture->sampleCount > VK_SAMPLE_COUNT_1_BIT) {
  5064. // Resolve attachment and multisample attachment
  5065. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5066. attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
  5067. attachmentDescriptions[attachmentDescriptionCount].samples =
  5068. VK_SAMPLE_COUNT_1_BIT;
  5069. attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[colorAttachmentInfos[i].loadOp];
  5070. attachmentDescriptions[attachmentDescriptionCount].storeOp =
  5071. VK_ATTACHMENT_STORE_OP_STORE; // Always store the resolve texture
  5072. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
  5073. VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5074. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
  5075. VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5076. attachmentDescriptions[attachmentDescriptionCount].initialLayout =
  5077. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5078. attachmentDescriptions[attachmentDescriptionCount].finalLayout =
  5079. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5080. resolveReferences[resolveReferenceCount].attachment =
  5081. attachmentDescriptionCount;
  5082. resolveReferences[resolveReferenceCount].layout =
  5083. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5084. attachmentDescriptionCount += 1;
  5085. resolveReferenceCount += 1;
  5086. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5087. attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
  5088. attachmentDescriptions[attachmentDescriptionCount].samples = texture->sampleCount;
  5089. attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[colorAttachmentInfos[i].loadOp];
  5090. attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[colorAttachmentInfos[i].storeOp];
  5091. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
  5092. VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5093. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
  5094. VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5095. attachmentDescriptions[attachmentDescriptionCount].initialLayout =
  5096. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5097. attachmentDescriptions[attachmentDescriptionCount].finalLayout =
  5098. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5099. colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
  5100. attachmentDescriptionCount;
  5101. colorAttachmentReferences[colorAttachmentReferenceCount].layout =
  5102. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5103. attachmentDescriptionCount += 1;
  5104. colorAttachmentReferenceCount += 1;
  5105. } else {
  5106. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5107. attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
  5108. attachmentDescriptions[attachmentDescriptionCount].samples =
  5109. VK_SAMPLE_COUNT_1_BIT;
  5110. attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[colorAttachmentInfos[i].loadOp];
  5111. attachmentDescriptions[attachmentDescriptionCount].storeOp =
  5112. VK_ATTACHMENT_STORE_OP_STORE; // Always store non-MSAA textures
  5113. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
  5114. VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5115. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
  5116. VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5117. attachmentDescriptions[attachmentDescriptionCount].initialLayout =
  5118. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5119. attachmentDescriptions[attachmentDescriptionCount].finalLayout =
  5120. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5121. colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
  5122. colorAttachmentReferences[colorAttachmentReferenceCount].layout =
  5123. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5124. attachmentDescriptionCount += 1;
  5125. colorAttachmentReferenceCount += 1;
  5126. }
  5127. }
  5128. subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  5129. subpass.flags = 0;
  5130. subpass.inputAttachmentCount = 0;
  5131. subpass.pInputAttachments = NULL;
  5132. subpass.colorAttachmentCount = colorAttachmentCount;
  5133. subpass.pColorAttachments = colorAttachmentReferences;
  5134. subpass.preserveAttachmentCount = 0;
  5135. subpass.pPreserveAttachments = NULL;
  5136. if (depthStencilAttachmentInfo == NULL) {
  5137. subpass.pDepthStencilAttachment = NULL;
  5138. } else {
  5139. texture = ((VulkanTextureContainer *)depthStencilAttachmentInfo->texture)->activeTextureHandle->vulkanTexture;
  5140. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5141. attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
  5142. attachmentDescriptions[attachmentDescriptionCount].samples = texture->sampleCount;
  5143. attachmentDescriptions[attachmentDescriptionCount].loadOp = SDLToVK_LoadOp[depthStencilAttachmentInfo->loadOp];
  5144. attachmentDescriptions[attachmentDescriptionCount].storeOp = SDLToVK_StoreOp[depthStencilAttachmentInfo->storeOp];
  5145. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = SDLToVK_LoadOp[depthStencilAttachmentInfo->stencilLoadOp];
  5146. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = SDLToVK_StoreOp[depthStencilAttachmentInfo->stencilStoreOp];
  5147. attachmentDescriptions[attachmentDescriptionCount].initialLayout =
  5148. VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5149. attachmentDescriptions[attachmentDescriptionCount].finalLayout =
  5150. VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5151. depthStencilAttachmentReference.attachment =
  5152. attachmentDescriptionCount;
  5153. depthStencilAttachmentReference.layout =
  5154. VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5155. subpass.pDepthStencilAttachment =
  5156. &depthStencilAttachmentReference;
  5157. attachmentDescriptionCount += 1;
  5158. }
  5159. if (texture != NULL && texture->sampleCount > VK_SAMPLE_COUNT_1_BIT) {
  5160. subpass.pResolveAttachments = resolveReferences;
  5161. } else {
  5162. subpass.pResolveAttachments = NULL;
  5163. }
  5164. renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  5165. renderPassCreateInfo.pNext = NULL;
  5166. renderPassCreateInfo.flags = 0;
  5167. renderPassCreateInfo.pAttachments = attachmentDescriptions;
  5168. renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
  5169. renderPassCreateInfo.subpassCount = 1;
  5170. renderPassCreateInfo.pSubpasses = &subpass;
  5171. renderPassCreateInfo.dependencyCount = 0;
  5172. renderPassCreateInfo.pDependencies = NULL;
  5173. vulkanResult = renderer->vkCreateRenderPass(
  5174. renderer->logicalDevice,
  5175. &renderPassCreateInfo,
  5176. NULL,
  5177. &renderPass);
  5178. if (vulkanResult != VK_SUCCESS) {
  5179. renderPass = VK_NULL_HANDLE;
  5180. LogVulkanResultAsError("vkCreateRenderPass", vulkanResult);
  5181. }
  5182. return renderPass;
  5183. }
  5184. static VkRenderPass VULKAN_INTERNAL_CreateTransientRenderPass(
  5185. VulkanRenderer *renderer,
  5186. SDL_GPUGraphicsPipelineAttachmentInfo attachmentInfo,
  5187. VkSampleCountFlagBits sampleCount)
  5188. {
  5189. VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
  5190. VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
  5191. VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
  5192. VkAttachmentReference depthStencilAttachmentReference;
  5193. SDL_GPUColorAttachmentDescription attachmentDescription;
  5194. VkSubpassDescription subpass;
  5195. VkRenderPassCreateInfo renderPassCreateInfo;
  5196. VkRenderPass renderPass;
  5197. VkResult result;
  5198. Uint32 multisampling = 0;
  5199. Uint32 attachmentDescriptionCount = 0;
  5200. Uint32 colorAttachmentReferenceCount = 0;
  5201. Uint32 resolveReferenceCount = 0;
  5202. Uint32 i;
  5203. for (i = 0; i < attachmentInfo.colorAttachmentCount; i += 1) {
  5204. attachmentDescription = attachmentInfo.colorAttachmentDescriptions[i];
  5205. if (sampleCount > VK_SAMPLE_COUNT_1_BIT) {
  5206. multisampling = 1;
  5207. // Resolve attachment and multisample attachment
  5208. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5209. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_SurfaceFormat[attachmentDescription.format];
  5210. attachmentDescriptions[attachmentDescriptionCount].samples = VK_SAMPLE_COUNT_1_BIT;
  5211. attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5212. attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5213. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5214. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5215. attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5216. attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5217. resolveReferences[resolveReferenceCount].attachment = attachmentDescriptionCount;
  5218. resolveReferences[resolveReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5219. attachmentDescriptionCount += 1;
  5220. resolveReferenceCount += 1;
  5221. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5222. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_SurfaceFormat[attachmentDescription.format];
  5223. attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
  5224. attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5225. attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5226. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5227. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5228. attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5229. attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5230. colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
  5231. attachmentDescriptionCount;
  5232. colorAttachmentReferences[colorAttachmentReferenceCount].layout =
  5233. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5234. attachmentDescriptionCount += 1;
  5235. colorAttachmentReferenceCount += 1;
  5236. } else {
  5237. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5238. attachmentDescriptions[attachmentDescriptionCount].format = SDLToVK_SurfaceFormat[attachmentDescription.format];
  5239. attachmentDescriptions[attachmentDescriptionCount].samples =
  5240. VK_SAMPLE_COUNT_1_BIT;
  5241. attachmentDescriptions[attachmentDescriptionCount].loadOp =
  5242. VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5243. attachmentDescriptions[attachmentDescriptionCount].storeOp =
  5244. VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5245. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
  5246. VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5247. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
  5248. VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5249. attachmentDescriptions[attachmentDescriptionCount].initialLayout =
  5250. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5251. attachmentDescriptions[attachmentDescriptionCount].finalLayout =
  5252. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5253. colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
  5254. colorAttachmentReferences[colorAttachmentReferenceCount].layout =
  5255. VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  5256. attachmentDescriptionCount += 1;
  5257. colorAttachmentReferenceCount += 1;
  5258. }
  5259. }
  5260. subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  5261. subpass.flags = 0;
  5262. subpass.inputAttachmentCount = 0;
  5263. subpass.pInputAttachments = NULL;
  5264. subpass.colorAttachmentCount = attachmentInfo.colorAttachmentCount;
  5265. subpass.pColorAttachments = colorAttachmentReferences;
  5266. subpass.preserveAttachmentCount = 0;
  5267. subpass.pPreserveAttachments = NULL;
  5268. if (attachmentInfo.hasDepthStencilAttachment) {
  5269. attachmentDescriptions[attachmentDescriptionCount].flags = 0;
  5270. attachmentDescriptions[attachmentDescriptionCount].format =
  5271. SDLToVK_SurfaceFormat[attachmentInfo.depthStencilFormat];
  5272. attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
  5273. attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5274. attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5275. attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  5276. attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  5277. attachmentDescriptions[attachmentDescriptionCount].initialLayout =
  5278. VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5279. attachmentDescriptions[attachmentDescriptionCount].finalLayout =
  5280. VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5281. depthStencilAttachmentReference.attachment =
  5282. attachmentDescriptionCount;
  5283. depthStencilAttachmentReference.layout =
  5284. VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  5285. subpass.pDepthStencilAttachment =
  5286. &depthStencilAttachmentReference;
  5287. attachmentDescriptionCount += 1;
  5288. } else {
  5289. subpass.pDepthStencilAttachment = NULL;
  5290. }
  5291. if (multisampling) {
  5292. subpass.pResolveAttachments = resolveReferences;
  5293. } else {
  5294. subpass.pResolveAttachments = NULL;
  5295. }
  5296. renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  5297. renderPassCreateInfo.pNext = NULL;
  5298. renderPassCreateInfo.flags = 0;
  5299. renderPassCreateInfo.pAttachments = attachmentDescriptions;
  5300. renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
  5301. renderPassCreateInfo.subpassCount = 1;
  5302. renderPassCreateInfo.pSubpasses = &subpass;
  5303. renderPassCreateInfo.dependencyCount = 0;
  5304. renderPassCreateInfo.pDependencies = NULL;
  5305. result = renderer->vkCreateRenderPass(
  5306. renderer->logicalDevice,
  5307. &renderPassCreateInfo,
  5308. NULL,
  5309. &renderPass);
  5310. if (result != VK_SUCCESS) {
  5311. renderPass = VK_NULL_HANDLE;
  5312. LogVulkanResultAsError("vkCreateRenderPass", result);
  5313. }
  5314. return renderPass;
  5315. }
  5316. static SDL_GPUGraphicsPipeline *VULKAN_CreateGraphicsPipeline(
  5317. SDL_GPURenderer *driverData,
  5318. SDL_GPUGraphicsPipelineCreateInfo *pipelineCreateInfo)
  5319. {
  5320. VkResult vulkanResult;
  5321. Uint32 i;
  5322. VkSampleCountFlagBits actualSampleCount;
  5323. VulkanGraphicsPipeline *graphicsPipeline = (VulkanGraphicsPipeline *)SDL_malloc(sizeof(VulkanGraphicsPipeline));
  5324. VkGraphicsPipelineCreateInfo vkPipelineCreateInfo;
  5325. VkPipelineShaderStageCreateInfo shaderStageCreateInfos[2];
  5326. VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo;
  5327. VkPipelineVertexInputDivisorStateCreateInfoEXT divisorStateCreateInfo;
  5328. VkVertexInputBindingDescription *vertexInputBindingDescriptions = SDL_stack_alloc(VkVertexInputBindingDescription, pipelineCreateInfo->vertexInputState.vertexBindingCount);
  5329. VkVertexInputAttributeDescription *vertexInputAttributeDescriptions = SDL_stack_alloc(VkVertexInputAttributeDescription, pipelineCreateInfo->vertexInputState.vertexAttributeCount);
  5330. VkVertexInputBindingDivisorDescriptionEXT *divisorDescriptions = SDL_stack_alloc(VkVertexInputBindingDivisorDescriptionEXT, pipelineCreateInfo->vertexInputState.vertexBindingCount);
  5331. Uint32 divisorDescriptionCount = 0;
  5332. VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo;
  5333. VkPipelineViewportStateCreateInfo viewportStateCreateInfo;
  5334. VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo;
  5335. VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo;
  5336. VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo;
  5337. VkStencilOpState frontStencilState;
  5338. VkStencilOpState backStencilState;
  5339. VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo;
  5340. VkPipelineColorBlendAttachmentState *colorBlendAttachmentStates = SDL_stack_alloc(
  5341. VkPipelineColorBlendAttachmentState,
  5342. pipelineCreateInfo->attachmentInfo.colorAttachmentCount);
  5343. static const VkDynamicState dynamicStates[] = {
  5344. VK_DYNAMIC_STATE_VIEWPORT,
  5345. VK_DYNAMIC_STATE_SCISSOR
  5346. };
  5347. VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo;
  5348. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5349. // Find a compatible sample count to use
  5350. actualSampleCount = VULKAN_INTERNAL_GetMaxMultiSampleCount(
  5351. renderer,
  5352. SDLToVK_SampleCount[pipelineCreateInfo->multisampleState.sampleCount]);
  5353. // Create a "compatible" render pass
  5354. VkRenderPass transientRenderPass = VULKAN_INTERNAL_CreateTransientRenderPass(
  5355. renderer,
  5356. pipelineCreateInfo->attachmentInfo,
  5357. actualSampleCount);
  5358. // Dynamic state
  5359. dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
  5360. dynamicStateCreateInfo.pNext = NULL;
  5361. dynamicStateCreateInfo.flags = 0;
  5362. dynamicStateCreateInfo.dynamicStateCount = SDL_arraysize(dynamicStates);
  5363. dynamicStateCreateInfo.pDynamicStates = dynamicStates;
  5364. // Shader stages
  5365. graphicsPipeline->vertexShader = (VulkanShader *)pipelineCreateInfo->vertexShader;
  5366. SDL_AtomicIncRef(&graphicsPipeline->vertexShader->referenceCount);
  5367. shaderStageCreateInfos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
  5368. shaderStageCreateInfos[0].pNext = NULL;
  5369. shaderStageCreateInfos[0].flags = 0;
  5370. shaderStageCreateInfos[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
  5371. shaderStageCreateInfos[0].module = graphicsPipeline->vertexShader->shaderModule;
  5372. shaderStageCreateInfos[0].pName = graphicsPipeline->vertexShader->entryPointName;
  5373. shaderStageCreateInfos[0].pSpecializationInfo = NULL;
  5374. graphicsPipeline->fragmentShader = (VulkanShader *)pipelineCreateInfo->fragmentShader;
  5375. SDL_AtomicIncRef(&graphicsPipeline->fragmentShader->referenceCount);
  5376. shaderStageCreateInfos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
  5377. shaderStageCreateInfos[1].pNext = NULL;
  5378. shaderStageCreateInfos[1].flags = 0;
  5379. shaderStageCreateInfos[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
  5380. shaderStageCreateInfos[1].module = graphicsPipeline->fragmentShader->shaderModule;
  5381. shaderStageCreateInfos[1].pName = graphicsPipeline->fragmentShader->entryPointName;
  5382. shaderStageCreateInfos[1].pSpecializationInfo = NULL;
  5383. // Vertex input
  5384. for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexBindingCount; i += 1) {
  5385. vertexInputBindingDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexBindings[i].binding;
  5386. vertexInputBindingDescriptions[i].inputRate = SDLToVK_VertexInputRate[pipelineCreateInfo->vertexInputState.vertexBindings[i].inputRate];
  5387. vertexInputBindingDescriptions[i].stride = pipelineCreateInfo->vertexInputState.vertexBindings[i].stride;
  5388. if (pipelineCreateInfo->vertexInputState.vertexBindings[i].inputRate == SDL_GPU_VERTEXINPUTRATE_INSTANCE) {
  5389. divisorDescriptionCount += 1;
  5390. }
  5391. }
  5392. for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexAttributeCount; i += 1) {
  5393. vertexInputAttributeDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexAttributes[i].binding;
  5394. vertexInputAttributeDescriptions[i].format = SDLToVK_VertexFormat[pipelineCreateInfo->vertexInputState.vertexAttributes[i].format];
  5395. vertexInputAttributeDescriptions[i].location = pipelineCreateInfo->vertexInputState.vertexAttributes[i].location;
  5396. vertexInputAttributeDescriptions[i].offset = pipelineCreateInfo->vertexInputState.vertexAttributes[i].offset;
  5397. }
  5398. vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
  5399. vertexInputStateCreateInfo.pNext = NULL;
  5400. vertexInputStateCreateInfo.flags = 0;
  5401. vertexInputStateCreateInfo.vertexBindingDescriptionCount = pipelineCreateInfo->vertexInputState.vertexBindingCount;
  5402. vertexInputStateCreateInfo.pVertexBindingDescriptions = vertexInputBindingDescriptions;
  5403. vertexInputStateCreateInfo.vertexAttributeDescriptionCount = pipelineCreateInfo->vertexInputState.vertexAttributeCount;
  5404. vertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions;
  5405. if (divisorDescriptionCount > 0) {
  5406. divisorDescriptionCount = 0;
  5407. for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexBindingCount; i += 1) {
  5408. if (pipelineCreateInfo->vertexInputState.vertexBindings[i].inputRate == SDL_GPU_VERTEXINPUTRATE_INSTANCE) {
  5409. divisorDescriptions[divisorDescriptionCount].binding = pipelineCreateInfo->vertexInputState.vertexBindings[i].binding;
  5410. divisorDescriptions[divisorDescriptionCount].divisor = pipelineCreateInfo->vertexInputState.vertexBindings[i].instanceStepRate;
  5411. divisorDescriptionCount += 1;
  5412. }
  5413. }
  5414. divisorStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_DIVISOR_STATE_CREATE_INFO_EXT;
  5415. divisorStateCreateInfo.pNext = NULL;
  5416. divisorStateCreateInfo.vertexBindingDivisorCount = divisorDescriptionCount;
  5417. divisorStateCreateInfo.pVertexBindingDivisors = divisorDescriptions;
  5418. vertexInputStateCreateInfo.pNext = &divisorStateCreateInfo;
  5419. }
  5420. // Topology
  5421. inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
  5422. inputAssemblyStateCreateInfo.pNext = NULL;
  5423. inputAssemblyStateCreateInfo.flags = 0;
  5424. inputAssemblyStateCreateInfo.primitiveRestartEnable = VK_FALSE;
  5425. inputAssemblyStateCreateInfo.topology = SDLToVK_PrimitiveType[pipelineCreateInfo->primitiveType];
  5426. graphicsPipeline->primitiveType = pipelineCreateInfo->primitiveType;
  5427. // Viewport
  5428. // NOTE: viewport and scissor are dynamic, and must be set using the command buffer
  5429. viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
  5430. viewportStateCreateInfo.pNext = NULL;
  5431. viewportStateCreateInfo.flags = 0;
  5432. viewportStateCreateInfo.viewportCount = 1;
  5433. viewportStateCreateInfo.pViewports = NULL;
  5434. viewportStateCreateInfo.scissorCount = 1;
  5435. viewportStateCreateInfo.pScissors = NULL;
  5436. // Rasterization
  5437. rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
  5438. rasterizationStateCreateInfo.pNext = NULL;
  5439. rasterizationStateCreateInfo.flags = 0;
  5440. rasterizationStateCreateInfo.depthClampEnable = VK_FALSE;
  5441. rasterizationStateCreateInfo.rasterizerDiscardEnable = VK_FALSE;
  5442. rasterizationStateCreateInfo.polygonMode = SDLToVK_PolygonMode(
  5443. renderer,
  5444. pipelineCreateInfo->rasterizerState.fillMode);
  5445. rasterizationStateCreateInfo.cullMode = SDLToVK_CullMode[pipelineCreateInfo->rasterizerState.cullMode];
  5446. rasterizationStateCreateInfo.frontFace = SDLToVK_FrontFace[pipelineCreateInfo->rasterizerState.frontFace];
  5447. rasterizationStateCreateInfo.depthBiasEnable =
  5448. pipelineCreateInfo->rasterizerState.depthBiasEnable;
  5449. rasterizationStateCreateInfo.depthBiasConstantFactor =
  5450. pipelineCreateInfo->rasterizerState.depthBiasConstantFactor;
  5451. rasterizationStateCreateInfo.depthBiasClamp =
  5452. pipelineCreateInfo->rasterizerState.depthBiasClamp;
  5453. rasterizationStateCreateInfo.depthBiasSlopeFactor =
  5454. pipelineCreateInfo->rasterizerState.depthBiasSlopeFactor;
  5455. rasterizationStateCreateInfo.lineWidth = 1.0f;
  5456. // Multisample
  5457. multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
  5458. multisampleStateCreateInfo.pNext = NULL;
  5459. multisampleStateCreateInfo.flags = 0;
  5460. multisampleStateCreateInfo.rasterizationSamples = actualSampleCount;
  5461. multisampleStateCreateInfo.sampleShadingEnable = VK_FALSE;
  5462. multisampleStateCreateInfo.minSampleShading = 1.0f;
  5463. multisampleStateCreateInfo.pSampleMask =
  5464. &pipelineCreateInfo->multisampleState.sampleMask;
  5465. multisampleStateCreateInfo.alphaToCoverageEnable = VK_FALSE;
  5466. multisampleStateCreateInfo.alphaToOneEnable = VK_FALSE;
  5467. // Depth Stencil State
  5468. frontStencilState.failOp = SDLToVK_StencilOp[pipelineCreateInfo->depthStencilState.frontStencilState.failOp];
  5469. frontStencilState.passOp = SDLToVK_StencilOp[pipelineCreateInfo->depthStencilState.frontStencilState.passOp];
  5470. frontStencilState.depthFailOp = SDLToVK_StencilOp[pipelineCreateInfo->depthStencilState.frontStencilState.depthFailOp];
  5471. frontStencilState.compareOp = SDLToVK_CompareOp[pipelineCreateInfo->depthStencilState.frontStencilState.compareOp];
  5472. frontStencilState.compareMask =
  5473. pipelineCreateInfo->depthStencilState.compareMask;
  5474. frontStencilState.writeMask =
  5475. pipelineCreateInfo->depthStencilState.writeMask;
  5476. frontStencilState.reference =
  5477. pipelineCreateInfo->depthStencilState.reference;
  5478. backStencilState.failOp = SDLToVK_StencilOp[pipelineCreateInfo->depthStencilState.backStencilState.failOp];
  5479. backStencilState.passOp = SDLToVK_StencilOp[pipelineCreateInfo->depthStencilState.backStencilState.passOp];
  5480. backStencilState.depthFailOp = SDLToVK_StencilOp[pipelineCreateInfo->depthStencilState.backStencilState.depthFailOp];
  5481. backStencilState.compareOp = SDLToVK_CompareOp[pipelineCreateInfo->depthStencilState.backStencilState.compareOp];
  5482. backStencilState.compareMask =
  5483. pipelineCreateInfo->depthStencilState.compareMask;
  5484. backStencilState.writeMask =
  5485. pipelineCreateInfo->depthStencilState.writeMask;
  5486. backStencilState.reference =
  5487. pipelineCreateInfo->depthStencilState.reference;
  5488. depthStencilStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
  5489. depthStencilStateCreateInfo.pNext = NULL;
  5490. depthStencilStateCreateInfo.flags = 0;
  5491. depthStencilStateCreateInfo.depthTestEnable =
  5492. pipelineCreateInfo->depthStencilState.depthTestEnable;
  5493. depthStencilStateCreateInfo.depthWriteEnable =
  5494. pipelineCreateInfo->depthStencilState.depthWriteEnable;
  5495. depthStencilStateCreateInfo.depthCompareOp = SDLToVK_CompareOp[pipelineCreateInfo->depthStencilState.compareOp];
  5496. depthStencilStateCreateInfo.depthBoundsTestEnable = VK_FALSE;
  5497. depthStencilStateCreateInfo.stencilTestEnable =
  5498. pipelineCreateInfo->depthStencilState.stencilTestEnable;
  5499. depthStencilStateCreateInfo.front = frontStencilState;
  5500. depthStencilStateCreateInfo.back = backStencilState;
  5501. depthStencilStateCreateInfo.minDepthBounds = 0; // unused
  5502. depthStencilStateCreateInfo.maxDepthBounds = 0; // unused
  5503. // Color Blend
  5504. for (i = 0; i < pipelineCreateInfo->attachmentInfo.colorAttachmentCount; i += 1) {
  5505. SDL_GPUColorAttachmentBlendState blendState = pipelineCreateInfo->attachmentInfo.colorAttachmentDescriptions[i].blendState;
  5506. colorBlendAttachmentStates[i].blendEnable =
  5507. blendState.blendEnable;
  5508. colorBlendAttachmentStates[i].srcColorBlendFactor = SDLToVK_BlendFactor[blendState.srcColorBlendFactor];
  5509. colorBlendAttachmentStates[i].dstColorBlendFactor = SDLToVK_BlendFactor[blendState.dstColorBlendFactor];
  5510. colorBlendAttachmentStates[i].colorBlendOp = SDLToVK_BlendOp[blendState.colorBlendOp];
  5511. colorBlendAttachmentStates[i].srcAlphaBlendFactor = SDLToVK_BlendFactor[blendState.srcAlphaBlendFactor];
  5512. colorBlendAttachmentStates[i].dstAlphaBlendFactor = SDLToVK_BlendFactor[blendState.dstAlphaBlendFactor];
  5513. colorBlendAttachmentStates[i].alphaBlendOp = SDLToVK_BlendOp[blendState.alphaBlendOp];
  5514. colorBlendAttachmentStates[i].colorWriteMask =
  5515. blendState.colorWriteMask;
  5516. }
  5517. colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
  5518. colorBlendStateCreateInfo.pNext = NULL;
  5519. colorBlendStateCreateInfo.flags = 0;
  5520. colorBlendStateCreateInfo.attachmentCount =
  5521. pipelineCreateInfo->attachmentInfo.colorAttachmentCount;
  5522. colorBlendStateCreateInfo.pAttachments =
  5523. colorBlendAttachmentStates;
  5524. colorBlendStateCreateInfo.blendConstants[0] =
  5525. pipelineCreateInfo->blendConstants[0];
  5526. colorBlendStateCreateInfo.blendConstants[1] =
  5527. pipelineCreateInfo->blendConstants[1];
  5528. colorBlendStateCreateInfo.blendConstants[2] =
  5529. pipelineCreateInfo->blendConstants[2];
  5530. colorBlendStateCreateInfo.blendConstants[3] =
  5531. pipelineCreateInfo->blendConstants[3];
  5532. // We don't support LogicOp, so this is easy.
  5533. colorBlendStateCreateInfo.logicOpEnable = VK_FALSE;
  5534. colorBlendStateCreateInfo.logicOp = 0;
  5535. // Pipeline Layout
  5536. if (!VULKAN_INTERNAL_InitializeGraphicsPipelineResourceLayout(
  5537. renderer,
  5538. graphicsPipeline->vertexShader,
  5539. graphicsPipeline->fragmentShader,
  5540. &graphicsPipeline->resourceLayout)) {
  5541. SDL_stack_free(vertexInputBindingDescriptions);
  5542. SDL_stack_free(vertexInputAttributeDescriptions);
  5543. SDL_stack_free(colorBlendAttachmentStates);
  5544. SDL_stack_free(divisorDescriptions);
  5545. SDL_free(graphicsPipeline);
  5546. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to initialize pipeline resource layout!");
  5547. return NULL;
  5548. }
  5549. // Pipeline
  5550. vkPipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
  5551. vkPipelineCreateInfo.pNext = NULL;
  5552. vkPipelineCreateInfo.flags = 0;
  5553. vkPipelineCreateInfo.stageCount = 2;
  5554. vkPipelineCreateInfo.pStages = shaderStageCreateInfos;
  5555. vkPipelineCreateInfo.pVertexInputState = &vertexInputStateCreateInfo;
  5556. vkPipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo;
  5557. vkPipelineCreateInfo.pTessellationState = VK_NULL_HANDLE;
  5558. vkPipelineCreateInfo.pViewportState = &viewportStateCreateInfo;
  5559. vkPipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo;
  5560. vkPipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo;
  5561. vkPipelineCreateInfo.pDepthStencilState = &depthStencilStateCreateInfo;
  5562. vkPipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo;
  5563. vkPipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo;
  5564. vkPipelineCreateInfo.layout = graphicsPipeline->resourceLayout.pipelineLayout;
  5565. vkPipelineCreateInfo.renderPass = transientRenderPass;
  5566. vkPipelineCreateInfo.subpass = 0;
  5567. vkPipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
  5568. vkPipelineCreateInfo.basePipelineIndex = 0;
  5569. // TODO: enable pipeline caching
  5570. vulkanResult = renderer->vkCreateGraphicsPipelines(
  5571. renderer->logicalDevice,
  5572. VK_NULL_HANDLE,
  5573. 1,
  5574. &vkPipelineCreateInfo,
  5575. NULL,
  5576. &graphicsPipeline->pipeline);
  5577. SDL_stack_free(vertexInputBindingDescriptions);
  5578. SDL_stack_free(vertexInputAttributeDescriptions);
  5579. SDL_stack_free(colorBlendAttachmentStates);
  5580. SDL_stack_free(divisorDescriptions);
  5581. renderer->vkDestroyRenderPass(
  5582. renderer->logicalDevice,
  5583. transientRenderPass,
  5584. NULL);
  5585. if (vulkanResult != VK_SUCCESS) {
  5586. SDL_free(graphicsPipeline);
  5587. LogVulkanResultAsError("vkCreateGraphicsPipelines", vulkanResult);
  5588. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create graphics pipeline!");
  5589. return NULL;
  5590. }
  5591. SDL_AtomicSet(&graphicsPipeline->referenceCount, 0);
  5592. return (SDL_GPUGraphicsPipeline *)graphicsPipeline;
  5593. }
  5594. static SDL_GPUComputePipeline *VULKAN_CreateComputePipeline(
  5595. SDL_GPURenderer *driverData,
  5596. SDL_GPUComputePipelineCreateInfo *pipelineCreateInfo)
  5597. {
  5598. VkShaderModuleCreateInfo shaderModuleCreateInfo;
  5599. VkComputePipelineCreateInfo computePipelineCreateInfo;
  5600. VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo;
  5601. VkResult vulkanResult;
  5602. Uint32 i;
  5603. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5604. VulkanComputePipeline *vulkanComputePipeline;
  5605. if (pipelineCreateInfo->format != SDL_GPU_SHADERFORMAT_SPIRV) {
  5606. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Incompatible shader format for Vulkan!");
  5607. return NULL;
  5608. }
  5609. vulkanComputePipeline = SDL_malloc(sizeof(VulkanComputePipeline));
  5610. shaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  5611. shaderModuleCreateInfo.pNext = NULL;
  5612. shaderModuleCreateInfo.flags = 0;
  5613. shaderModuleCreateInfo.codeSize = pipelineCreateInfo->codeSize;
  5614. shaderModuleCreateInfo.pCode = (Uint32 *)pipelineCreateInfo->code;
  5615. vulkanResult = renderer->vkCreateShaderModule(
  5616. renderer->logicalDevice,
  5617. &shaderModuleCreateInfo,
  5618. NULL,
  5619. &vulkanComputePipeline->shaderModule);
  5620. if (vulkanResult != VK_SUCCESS) {
  5621. SDL_free(vulkanComputePipeline);
  5622. LogVulkanResultAsError("vkCreateShaderModule", vulkanResult);
  5623. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create compute pipeline!");
  5624. return NULL;
  5625. }
  5626. pipelineShaderStageCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
  5627. pipelineShaderStageCreateInfo.pNext = NULL;
  5628. pipelineShaderStageCreateInfo.flags = 0;
  5629. pipelineShaderStageCreateInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT;
  5630. pipelineShaderStageCreateInfo.module = vulkanComputePipeline->shaderModule;
  5631. pipelineShaderStageCreateInfo.pName = pipelineCreateInfo->entryPointName;
  5632. pipelineShaderStageCreateInfo.pSpecializationInfo = NULL;
  5633. if (!VULKAN_INTERNAL_InitializeComputePipelineResourceLayout(
  5634. renderer,
  5635. pipelineCreateInfo,
  5636. &vulkanComputePipeline->resourceLayout)) {
  5637. renderer->vkDestroyShaderModule(
  5638. renderer->logicalDevice,
  5639. vulkanComputePipeline->shaderModule,
  5640. NULL);
  5641. SDL_free(vulkanComputePipeline);
  5642. return NULL;
  5643. }
  5644. computePipelineCreateInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
  5645. computePipelineCreateInfo.pNext = NULL;
  5646. computePipelineCreateInfo.flags = 0;
  5647. computePipelineCreateInfo.stage = pipelineShaderStageCreateInfo;
  5648. computePipelineCreateInfo.layout =
  5649. vulkanComputePipeline->resourceLayout.pipelineLayout;
  5650. computePipelineCreateInfo.basePipelineHandle = (VkPipeline)VK_NULL_HANDLE;
  5651. computePipelineCreateInfo.basePipelineIndex = 0;
  5652. vulkanResult = renderer->vkCreateComputePipelines(
  5653. renderer->logicalDevice,
  5654. (VkPipelineCache)VK_NULL_HANDLE,
  5655. 1,
  5656. &computePipelineCreateInfo,
  5657. NULL,
  5658. &vulkanComputePipeline->pipeline);
  5659. if (vulkanResult != VK_SUCCESS) {
  5660. renderer->vkDestroyPipelineLayout(
  5661. renderer->logicalDevice,
  5662. vulkanComputePipeline->resourceLayout.pipelineLayout,
  5663. NULL);
  5664. for (i = 0; i < 3; i += 1) {
  5665. VULKAN_INTERNAL_DestroyDescriptorSetPool(
  5666. renderer,
  5667. &vulkanComputePipeline->resourceLayout.descriptorSetPools[i]);
  5668. }
  5669. renderer->vkDestroyShaderModule(
  5670. renderer->logicalDevice,
  5671. vulkanComputePipeline->shaderModule,
  5672. NULL);
  5673. LogVulkanResultAsError("vkCreateComputePipeline", vulkanResult);
  5674. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create compute pipeline!");
  5675. return NULL;
  5676. }
  5677. SDL_AtomicSet(&vulkanComputePipeline->referenceCount, 0);
  5678. return (SDL_GPUComputePipeline *)vulkanComputePipeline;
  5679. }
  5680. static SDL_GPUSampler *VULKAN_CreateSampler(
  5681. SDL_GPURenderer *driverData,
  5682. SDL_GPUSamplerCreateInfo *samplerCreateInfo)
  5683. {
  5684. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5685. VulkanSampler *vulkanSampler = SDL_malloc(sizeof(VulkanSampler));
  5686. VkResult vulkanResult;
  5687. VkSamplerCreateInfo vkSamplerCreateInfo;
  5688. vkSamplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
  5689. vkSamplerCreateInfo.pNext = NULL;
  5690. vkSamplerCreateInfo.flags = 0;
  5691. vkSamplerCreateInfo.magFilter = SDLToVK_Filter[samplerCreateInfo->magFilter];
  5692. vkSamplerCreateInfo.minFilter = SDLToVK_Filter[samplerCreateInfo->minFilter];
  5693. vkSamplerCreateInfo.mipmapMode = SDLToVK_SamplerMipmapMode[samplerCreateInfo->mipmapMode];
  5694. vkSamplerCreateInfo.addressModeU = SDLToVK_SamplerAddressMode[samplerCreateInfo->addressModeU];
  5695. vkSamplerCreateInfo.addressModeV = SDLToVK_SamplerAddressMode[samplerCreateInfo->addressModeV];
  5696. vkSamplerCreateInfo.addressModeW = SDLToVK_SamplerAddressMode[samplerCreateInfo->addressModeW];
  5697. vkSamplerCreateInfo.mipLodBias = samplerCreateInfo->mipLodBias;
  5698. vkSamplerCreateInfo.anisotropyEnable = samplerCreateInfo->anisotropyEnable;
  5699. vkSamplerCreateInfo.maxAnisotropy = samplerCreateInfo->maxAnisotropy;
  5700. vkSamplerCreateInfo.compareEnable = samplerCreateInfo->compareEnable;
  5701. vkSamplerCreateInfo.compareOp = SDLToVK_CompareOp[samplerCreateInfo->compareOp];
  5702. vkSamplerCreateInfo.minLod = samplerCreateInfo->minLod;
  5703. vkSamplerCreateInfo.maxLod = samplerCreateInfo->maxLod;
  5704. vkSamplerCreateInfo.borderColor = VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK; // arbitrary, unused
  5705. vkSamplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
  5706. vulkanResult = renderer->vkCreateSampler(
  5707. renderer->logicalDevice,
  5708. &vkSamplerCreateInfo,
  5709. NULL,
  5710. &vulkanSampler->sampler);
  5711. if (vulkanResult != VK_SUCCESS) {
  5712. SDL_free(vulkanSampler);
  5713. LogVulkanResultAsError("vkCreateSampler", vulkanResult);
  5714. return NULL;
  5715. }
  5716. SDL_AtomicSet(&vulkanSampler->referenceCount, 0);
  5717. return (SDL_GPUSampler *)vulkanSampler;
  5718. }
  5719. static SDL_GPUShader *VULKAN_CreateShader(
  5720. SDL_GPURenderer *driverData,
  5721. SDL_GPUShaderCreateInfo *shaderCreateInfo)
  5722. {
  5723. VulkanShader *vulkanShader;
  5724. VkResult vulkanResult;
  5725. VkShaderModuleCreateInfo vkShaderModuleCreateInfo;
  5726. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5727. size_t entryPointNameLength;
  5728. vulkanShader = SDL_malloc(sizeof(VulkanShader));
  5729. vkShaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  5730. vkShaderModuleCreateInfo.pNext = NULL;
  5731. vkShaderModuleCreateInfo.flags = 0;
  5732. vkShaderModuleCreateInfo.codeSize = shaderCreateInfo->codeSize;
  5733. vkShaderModuleCreateInfo.pCode = (Uint32 *)shaderCreateInfo->code;
  5734. vulkanResult = renderer->vkCreateShaderModule(
  5735. renderer->logicalDevice,
  5736. &vkShaderModuleCreateInfo,
  5737. NULL,
  5738. &vulkanShader->shaderModule);
  5739. if (vulkanResult != VK_SUCCESS) {
  5740. SDL_free(vulkanShader);
  5741. LogVulkanResultAsError("vkCreateShaderModule", vulkanResult);
  5742. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create shader module!");
  5743. return NULL;
  5744. }
  5745. entryPointNameLength = SDL_strlen(shaderCreateInfo->entryPointName) + 1;
  5746. vulkanShader->entryPointName = SDL_malloc(entryPointNameLength);
  5747. SDL_utf8strlcpy((char *)vulkanShader->entryPointName, shaderCreateInfo->entryPointName, entryPointNameLength);
  5748. vulkanShader->samplerCount = shaderCreateInfo->samplerCount;
  5749. vulkanShader->storageTextureCount = shaderCreateInfo->storageTextureCount;
  5750. vulkanShader->storageBufferCount = shaderCreateInfo->storageBufferCount;
  5751. vulkanShader->uniformBufferCount = shaderCreateInfo->uniformBufferCount;
  5752. SDL_AtomicSet(&vulkanShader->referenceCount, 0);
  5753. return (SDL_GPUShader *)vulkanShader;
  5754. }
  5755. static bool VULKAN_SupportsSampleCount(
  5756. SDL_GPURenderer *driverData,
  5757. SDL_GPUTextureFormat format,
  5758. SDL_GPUSampleCount sampleCount)
  5759. {
  5760. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5761. VkSampleCountFlags bits = IsDepthFormat(format) ? renderer->physicalDeviceProperties.properties.limits.framebufferDepthSampleCounts : renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts;
  5762. VkSampleCountFlagBits vkSampleCount = SDLToVK_SampleCount[sampleCount];
  5763. return !!(bits & vkSampleCount);
  5764. }
  5765. static SDL_GPUTexture *VULKAN_CreateTexture(
  5766. SDL_GPURenderer *driverData,
  5767. SDL_GPUTextureCreateInfo *textureCreateInfo)
  5768. {
  5769. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5770. VkImageAspectFlags imageAspectFlags;
  5771. Uint8 isDepthFormat = IsDepthFormat(textureCreateInfo->format);
  5772. VkFormat format;
  5773. VkComponentMapping swizzle;
  5774. VulkanTextureContainer *container;
  5775. VulkanTextureHandle *textureHandle;
  5776. format = SDLToVK_SurfaceFormat[textureCreateInfo->format];
  5777. swizzle = SDLToVK_SurfaceSwizzle[textureCreateInfo->format];
  5778. if (isDepthFormat) {
  5779. imageAspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;
  5780. if (IsStencilFormat(textureCreateInfo->format)) {
  5781. imageAspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
  5782. }
  5783. } else {
  5784. imageAspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
  5785. }
  5786. textureHandle = VULKAN_INTERNAL_CreateTextureHandle(
  5787. renderer,
  5788. textureCreateInfo->width,
  5789. textureCreateInfo->height,
  5790. textureCreateInfo->type == SDL_GPU_TEXTURETYPE_3D ? textureCreateInfo->layerCountOrDepth : 1,
  5791. textureCreateInfo->type,
  5792. textureCreateInfo->type == SDL_GPU_TEXTURETYPE_3D ? 1 : textureCreateInfo->layerCountOrDepth,
  5793. textureCreateInfo->levelCount,
  5794. SDLToVK_SampleCount[textureCreateInfo->sampleCount],
  5795. format,
  5796. swizzle,
  5797. imageAspectFlags,
  5798. textureCreateInfo->usageFlags,
  5799. false);
  5800. if (textureHandle == NULL) {
  5801. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create texture container!");
  5802. return NULL;
  5803. }
  5804. container = SDL_malloc(sizeof(VulkanTextureContainer));
  5805. container->header.info = *textureCreateInfo;
  5806. container->canBeCycled = 1;
  5807. container->activeTextureHandle = textureHandle;
  5808. container->textureCapacity = 1;
  5809. container->textureCount = 1;
  5810. container->textureHandles = SDL_malloc(
  5811. container->textureCapacity * sizeof(VulkanTextureHandle *));
  5812. container->textureHandles[0] = container->activeTextureHandle;
  5813. container->debugName = NULL;
  5814. textureHandle->container = container;
  5815. return (SDL_GPUTexture *)container;
  5816. }
  5817. static SDL_GPUBuffer *VULKAN_CreateBuffer(
  5818. SDL_GPURenderer *driverData,
  5819. SDL_GPUBufferUsageFlags usageFlags,
  5820. Uint32 sizeInBytes)
  5821. {
  5822. return (SDL_GPUBuffer *)VULKAN_INTERNAL_CreateBufferContainer(
  5823. (VulkanRenderer *)driverData,
  5824. (VkDeviceSize)sizeInBytes,
  5825. usageFlags,
  5826. VULKAN_BUFFER_TYPE_GPU);
  5827. }
  5828. static VulkanUniformBuffer *VULKAN_INTERNAL_CreateUniformBuffer(
  5829. VulkanRenderer *renderer,
  5830. Uint32 sizeInBytes)
  5831. {
  5832. VulkanUniformBuffer *uniformBuffer = SDL_malloc(sizeof(VulkanUniformBuffer));
  5833. uniformBuffer->bufferHandle = VULKAN_INTERNAL_CreateBufferHandle(
  5834. renderer,
  5835. (VkDeviceSize)sizeInBytes,
  5836. 0,
  5837. VULKAN_BUFFER_TYPE_UNIFORM);
  5838. uniformBuffer->drawOffset = 0;
  5839. uniformBuffer->writeOffset = 0;
  5840. return uniformBuffer;
  5841. }
  5842. static SDL_GPUTransferBuffer *VULKAN_CreateTransferBuffer(
  5843. SDL_GPURenderer *driverData,
  5844. SDL_GPUTransferBufferUsage usage, // ignored on Vulkan
  5845. Uint32 sizeInBytes)
  5846. {
  5847. return (SDL_GPUTransferBuffer *)VULKAN_INTERNAL_CreateBufferContainer(
  5848. (VulkanRenderer *)driverData,
  5849. (VkDeviceSize)sizeInBytes,
  5850. 0,
  5851. VULKAN_BUFFER_TYPE_TRANSFER);
  5852. }
// Queues a texture for deferred destruction by appending it to the
// renderer's destroy list; the actual Vulkan objects are destroyed later.
// Safe to call repeatedly: the markedForDestroy flag makes repeat calls a
// no-op.
static void VULKAN_INTERNAL_ReleaseTexture(
    VulkanRenderer *renderer,
    VulkanTexture *vulkanTexture)
{
    // Already queued on the destroy list; nothing to do.
    if (vulkanTexture->markedForDestroy) {
        return;
    }

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array if it is full (capacity doubles).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->texturesToDestroy,
        VulkanTexture *,
        renderer->texturesToDestroyCount + 1,
        renderer->texturesToDestroyCapacity,
        renderer->texturesToDestroyCapacity * 2)

    renderer->texturesToDestroy[renderer->texturesToDestroyCount] = vulkanTexture;
    renderer->texturesToDestroyCount += 1;

    vulkanTexture->markedForDestroy = 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
  5872. static void VULKAN_ReleaseTexture(
  5873. SDL_GPURenderer *driverData,
  5874. SDL_GPUTexture *texture)
  5875. {
  5876. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5877. VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)texture;
  5878. Uint32 i;
  5879. SDL_LockMutex(renderer->disposeLock);
  5880. for (i = 0; i < vulkanTextureContainer->textureCount; i += 1) {
  5881. VULKAN_INTERNAL_ReleaseTexture(renderer, vulkanTextureContainer->textureHandles[i]->vulkanTexture);
  5882. SDL_free(vulkanTextureContainer->textureHandles[i]);
  5883. }
  5884. // Containers are just client handles, so we can destroy immediately
  5885. if (vulkanTextureContainer->debugName != NULL) {
  5886. SDL_free(vulkanTextureContainer->debugName);
  5887. }
  5888. SDL_free(vulkanTextureContainer->textureHandles);
  5889. SDL_free(vulkanTextureContainer);
  5890. SDL_UnlockMutex(renderer->disposeLock);
  5891. }
  5892. static void VULKAN_ReleaseSampler(
  5893. SDL_GPURenderer *driverData,
  5894. SDL_GPUSampler *sampler)
  5895. {
  5896. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5897. VulkanSampler *vulkanSampler = (VulkanSampler *)sampler;
  5898. SDL_LockMutex(renderer->disposeLock);
  5899. EXPAND_ARRAY_IF_NEEDED(
  5900. renderer->samplersToDestroy,
  5901. VulkanSampler *,
  5902. renderer->samplersToDestroyCount + 1,
  5903. renderer->samplersToDestroyCapacity,
  5904. renderer->samplersToDestroyCapacity * 2)
  5905. renderer->samplersToDestroy[renderer->samplersToDestroyCount] = vulkanSampler;
  5906. renderer->samplersToDestroyCount += 1;
  5907. SDL_UnlockMutex(renderer->disposeLock);
  5908. }
// Queues a buffer for deferred destruction by appending it to the renderer's
// destroy list; the actual Vulkan objects are destroyed later. Safe to call
// repeatedly: the markedForDestroy flag makes repeat calls a no-op.
static void VULKAN_INTERNAL_ReleaseBuffer(
    VulkanRenderer *renderer,
    VulkanBuffer *vulkanBuffer)
{
    // Already queued on the destroy list; nothing to do.
    if (vulkanBuffer->markedForDestroy) {
        return;
    }

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array if it is full (capacity doubles).
    EXPAND_ARRAY_IF_NEEDED(
        renderer->buffersToDestroy,
        VulkanBuffer *,
        renderer->buffersToDestroyCount + 1,
        renderer->buffersToDestroyCapacity,
        renderer->buffersToDestroyCapacity * 2)

    renderer->buffersToDestroy[renderer->buffersToDestroyCount] = vulkanBuffer;
    renderer->buffersToDestroyCount += 1;

    vulkanBuffer->markedForDestroy = 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
  5928. static void VULKAN_INTERNAL_ReleaseBufferContainer(
  5929. VulkanRenderer *renderer,
  5930. VulkanBufferContainer *bufferContainer)
  5931. {
  5932. Uint32 i;
  5933. SDL_LockMutex(renderer->disposeLock);
  5934. for (i = 0; i < bufferContainer->bufferCount; i += 1) {
  5935. VULKAN_INTERNAL_ReleaseBuffer(renderer, bufferContainer->bufferHandles[i]->vulkanBuffer);
  5936. SDL_free(bufferContainer->bufferHandles[i]);
  5937. }
  5938. // Containers are just client handles, so we can free immediately
  5939. if (bufferContainer->debugName != NULL) {
  5940. SDL_free(bufferContainer->debugName);
  5941. }
  5942. SDL_free(bufferContainer->bufferHandles);
  5943. SDL_free(bufferContainer);
  5944. SDL_UnlockMutex(renderer->disposeLock);
  5945. }
  5946. static void VULKAN_ReleaseBuffer(
  5947. SDL_GPURenderer *driverData,
  5948. SDL_GPUBuffer *buffer)
  5949. {
  5950. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5951. VulkanBufferContainer *vulkanBufferContainer = (VulkanBufferContainer *)buffer;
  5952. VULKAN_INTERNAL_ReleaseBufferContainer(
  5953. renderer,
  5954. vulkanBufferContainer);
  5955. }
  5956. static void VULKAN_ReleaseTransferBuffer(
  5957. SDL_GPURenderer *driverData,
  5958. SDL_GPUTransferBuffer *transferBuffer)
  5959. {
  5960. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  5961. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer;
  5962. VULKAN_INTERNAL_ReleaseBufferContainer(
  5963. renderer,
  5964. transferBufferContainer);
  5965. }
// Queues a shader for deferred destruction by appending it to the
// renderer's shadersToDestroy list under disposeLock. The VkShaderModule
// itself is destroyed later, once no in-flight work references it.
static void VULKAN_ReleaseShader(
    SDL_GPURenderer *driverData,
    SDL_GPUShader *shader)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanShader *vulkanShader = (VulkanShader *)shader;

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array geometrically if it is full.
    EXPAND_ARRAY_IF_NEEDED(
        renderer->shadersToDestroy,
        VulkanShader *,
        renderer->shadersToDestroyCount + 1,
        renderer->shadersToDestroyCapacity,
        renderer->shadersToDestroyCapacity * 2)

    renderer->shadersToDestroy[renderer->shadersToDestroyCount] = vulkanShader;
    renderer->shadersToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
// Queues a compute pipeline for deferred destruction by appending it to the
// renderer's computePipelinesToDestroy list under disposeLock. Actual
// teardown happens later, once the GPU no longer uses it.
static void VULKAN_ReleaseComputePipeline(
    SDL_GPURenderer *driverData,
    SDL_GPUComputePipeline *computePipeline)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline;

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array geometrically if it is full.
    EXPAND_ARRAY_IF_NEEDED(
        renderer->computePipelinesToDestroy,
        VulkanComputePipeline *,
        renderer->computePipelinesToDestroyCount + 1,
        renderer->computePipelinesToDestroyCapacity,
        renderer->computePipelinesToDestroyCapacity * 2)

    renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount] = vulkanComputePipeline;
    renderer->computePipelinesToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
// Queues a graphics pipeline for deferred destruction by appending it to
// the renderer's graphicsPipelinesToDestroy list under disposeLock. Actual
// teardown happens later, once the GPU no longer uses it.
static void VULKAN_ReleaseGraphicsPipeline(
    SDL_GPURenderer *driverData,
    SDL_GPUGraphicsPipeline *graphicsPipeline)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    VulkanGraphicsPipeline *vulkanGraphicsPipeline = (VulkanGraphicsPipeline *)graphicsPipeline;

    SDL_LockMutex(renderer->disposeLock);

    // Grow the pending-destroy array geometrically if it is full.
    EXPAND_ARRAY_IF_NEEDED(
        renderer->graphicsPipelinesToDestroy,
        VulkanGraphicsPipeline *,
        renderer->graphicsPipelinesToDestroyCount + 1,
        renderer->graphicsPipelinesToDestroyCapacity,
        renderer->graphicsPipelinesToDestroyCapacity * 2)

    renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount] = vulkanGraphicsPipeline;
    renderer->graphicsPipelinesToDestroyCount += 1;

    SDL_UnlockMutex(renderer->disposeLock);
}
  6017. // Command Buffer render state
  6018. static VkRenderPass VULKAN_INTERNAL_FetchRenderPass(
  6019. VulkanRenderer *renderer,
  6020. VulkanCommandBuffer *commandBuffer,
  6021. SDL_GPUColorAttachmentInfo *colorAttachmentInfos,
  6022. Uint32 colorAttachmentCount,
  6023. SDL_GPUDepthStencilAttachmentInfo *depthStencilAttachmentInfo)
  6024. {
  6025. VulkanRenderPassHashTableValue *renderPassWrapper = NULL;
  6026. VkRenderPass renderPassHandle;
  6027. RenderPassHashTableKey key;
  6028. Uint32 i;
  6029. for (i = 0; i < colorAttachmentCount; i += 1) {
  6030. key.colorTargetDescriptions[i].format = ((VulkanTextureContainer *)colorAttachmentInfos[i].texture)->activeTextureHandle->vulkanTexture->format;
  6031. key.colorTargetDescriptions[i].loadOp = colorAttachmentInfos[i].loadOp;
  6032. key.colorTargetDescriptions[i].storeOp = colorAttachmentInfos[i].storeOp;
  6033. }
  6034. key.colorAttachmentSampleCount = VK_SAMPLE_COUNT_1_BIT;
  6035. if (colorAttachmentCount > 0) {
  6036. key.colorAttachmentSampleCount = ((VulkanTextureContainer *)colorAttachmentInfos[0].texture)->activeTextureHandle->vulkanTexture->sampleCount;
  6037. }
  6038. key.colorAttachmentCount = colorAttachmentCount;
  6039. if (depthStencilAttachmentInfo == NULL) {
  6040. key.depthStencilTargetDescription.format = 0;
  6041. key.depthStencilTargetDescription.loadOp = SDL_GPU_LOADOP_DONT_CARE;
  6042. key.depthStencilTargetDescription.storeOp = SDL_GPU_STOREOP_DONT_CARE;
  6043. key.depthStencilTargetDescription.stencilLoadOp = SDL_GPU_LOADOP_DONT_CARE;
  6044. key.depthStencilTargetDescription.stencilStoreOp = SDL_GPU_STOREOP_DONT_CARE;
  6045. } else {
  6046. key.depthStencilTargetDescription.format = ((VulkanTextureContainer *)depthStencilAttachmentInfo->texture)->activeTextureHandle->vulkanTexture->format;
  6047. key.depthStencilTargetDescription.loadOp = depthStencilAttachmentInfo->loadOp;
  6048. key.depthStencilTargetDescription.storeOp = depthStencilAttachmentInfo->storeOp;
  6049. key.depthStencilTargetDescription.stencilLoadOp = depthStencilAttachmentInfo->stencilLoadOp;
  6050. key.depthStencilTargetDescription.stencilStoreOp = depthStencilAttachmentInfo->stencilStoreOp;
  6051. }
  6052. SDL_LockMutex(renderer->renderPassFetchLock);
  6053. bool result = SDL_FindInHashTable(
  6054. renderer->renderPassHashTable,
  6055. (const void *)&key,
  6056. (const void **)&renderPassWrapper);
  6057. SDL_UnlockMutex(renderer->renderPassFetchLock);
  6058. if (result) {
  6059. return renderPassWrapper->handle;
  6060. }
  6061. renderPassHandle = VULKAN_INTERNAL_CreateRenderPass(
  6062. renderer,
  6063. commandBuffer,
  6064. colorAttachmentInfos,
  6065. colorAttachmentCount,
  6066. depthStencilAttachmentInfo);
  6067. if (renderPassHandle == VK_NULL_HANDLE) {
  6068. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create VkRenderPass!");
  6069. return VK_NULL_HANDLE;
  6070. }
  6071. // Have to malloc the key to store it in the hashtable
  6072. RenderPassHashTableKey *allocedKey = SDL_malloc(sizeof(RenderPassHashTableKey));
  6073. SDL_memcpy(allocedKey, &key, sizeof(RenderPassHashTableKey));
  6074. renderPassWrapper = SDL_malloc(sizeof(VulkanRenderPassHashTableValue));
  6075. renderPassWrapper->handle = renderPassHandle;
  6076. SDL_LockMutex(renderer->renderPassFetchLock);
  6077. SDL_InsertIntoHashTable(
  6078. renderer->renderPassHashTable,
  6079. (const void *)allocedKey,
  6080. (const void *)renderPassWrapper);
  6081. SDL_UnlockMutex(renderer->renderPassFetchLock);
  6082. return renderPassHandle;
  6083. }
// Looks up (or lazily creates) a VkFramebuffer matching the given render
// pass and attachment views. Framebuffers are cached in
// framebufferHashTable, keyed by the attachment image views plus width and
// height. Returns NULL if vkCreateFramebuffer fails.
// NOTE(review): the lock is released between the failed lookup and the
// insert, so two threads racing on the same key can create redundant
// framebuffers — confirm whether the hash table tolerates duplicate keys.
static VulkanFramebuffer *VULKAN_INTERNAL_FetchFramebuffer(
    VulkanRenderer *renderer,
    VkRenderPass renderPass,
    SDL_GPUColorAttachmentInfo *colorAttachmentInfos,
    Uint32 colorAttachmentCount,
    SDL_GPUDepthStencilAttachmentInfo *depthStencilAttachmentInfo,
    Uint32 width,
    Uint32 height)
{
    VulkanFramebuffer *vulkanFramebuffer = NULL;
    VkFramebufferCreateInfo framebufferInfo;
    VkResult result;
    // Worst case: one resolve + one MSAA view per color target, plus depth.
    VkImageView imageViewAttachments[2 * MAX_COLOR_TARGET_BINDINGS + 1];
    FramebufferHashTableKey key;
    Uint32 attachmentCount = 0;
    Uint32 i;

    // Zero out unused key slots so hashing/comparison sees a canonical key.
    for (i = 0; i < MAX_COLOR_TARGET_BINDINGS; i += 1) {
        key.colorAttachmentViews[i] = VK_NULL_HANDLE;
        key.colorMultiSampleAttachmentViews[i] = VK_NULL_HANDLE;
    }

    key.colorAttachmentCount = colorAttachmentCount;

    // Build the key from the concrete image views that will be attached.
    for (i = 0; i < colorAttachmentCount; i += 1) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)colorAttachmentInfos[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            container,
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorAttachmentInfos[i].layerOrDepthPlane,
            colorAttachmentInfos[i].mipLevel);

        // For 3D textures the depth plane selects the render target view;
        // for all other types the layer was already baked into the
        // subresource lookup above.
        Uint32 rtvIndex =
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? colorAttachmentInfos[i].layerOrDepthPlane : 0;
        key.colorAttachmentViews[i] = subresource->renderTargetViews[rtvIndex];

        if (subresource->msaaTexHandle != NULL) {
            key.colorMultiSampleAttachmentViews[i] = subresource->msaaTexHandle->vulkanTexture->subresources[0].renderTargetViews[0];
        }
    }

    if (depthStencilAttachmentInfo == NULL) {
        key.depthStencilAttachmentView = VK_NULL_HANDLE;
    } else {
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            (VulkanTextureContainer *)depthStencilAttachmentInfo->texture,
            0,
            0);
        key.depthStencilAttachmentView = subresource->depthStencilView;
    }

    key.width = width;
    key.height = height;

    SDL_LockMutex(renderer->framebufferFetchLock);

    bool findResult = SDL_FindInHashTable(
        renderer->framebufferHashTable,
        (const void *)&key,
        (const void **)&vulkanFramebuffer);

    SDL_UnlockMutex(renderer->framebufferFetchLock);

    if (findResult) {
        return vulkanFramebuffer;
    }

    vulkanFramebuffer = SDL_malloc(sizeof(VulkanFramebuffer));
    SDL_AtomicSet(&vulkanFramebuffer->referenceCount, 0);

    // Create a new framebuffer
    // Attachment order: per color target the resolve view first, then (if
    // multisampled) the MSAA view; the depth-stencil view comes last.
    for (i = 0; i < colorAttachmentCount; i += 1) {
        VulkanTextureContainer *container = (VulkanTextureContainer *)colorAttachmentInfos[i].texture;
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            container,
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorAttachmentInfos[i].layerOrDepthPlane,
            colorAttachmentInfos[i].mipLevel);

        Uint32 rtvIndex =
            container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? colorAttachmentInfos[i].layerOrDepthPlane : 0;

        imageViewAttachments[attachmentCount] =
            subresource->renderTargetViews[rtvIndex];

        attachmentCount += 1;

        if (subresource->msaaTexHandle != NULL) {
            imageViewAttachments[attachmentCount] =
                subresource->msaaTexHandle->vulkanTexture->subresources[0].renderTargetViews[0];

            attachmentCount += 1;
        }
    }

    if (depthStencilAttachmentInfo != NULL) {
        VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
            (VulkanTextureContainer *)depthStencilAttachmentInfo->texture,
            0,
            0);
        imageViewAttachments[attachmentCount] = subresource->depthStencilView;

        attachmentCount += 1;
    }

    framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
    framebufferInfo.pNext = NULL;
    framebufferInfo.flags = 0;
    framebufferInfo.renderPass = renderPass;
    framebufferInfo.attachmentCount = attachmentCount;
    framebufferInfo.pAttachments = imageViewAttachments;
    framebufferInfo.width = key.width;
    framebufferInfo.height = key.height;
    framebufferInfo.layers = 1;

    result = renderer->vkCreateFramebuffer(
        renderer->logicalDevice,
        &framebufferInfo,
        NULL,
        &vulkanFramebuffer->framebuffer);

    if (result == VK_SUCCESS) {
        // Have to malloc the key to store it in the hashtable
        FramebufferHashTableKey *allocedKey = SDL_malloc(sizeof(FramebufferHashTableKey));
        SDL_memcpy(allocedKey, &key, sizeof(FramebufferHashTableKey));

        SDL_LockMutex(renderer->framebufferFetchLock);

        SDL_InsertIntoHashTable(
            renderer->framebufferHashTable,
            (const void *)allocedKey,
            (const void *)vulkanFramebuffer);

        SDL_UnlockMutex(renderer->framebufferFetchLock);
    } else {
        LogVulkanResultAsError("vkCreateFramebuffer", result);
        SDL_free(vulkanFramebuffer);
        vulkanFramebuffer = NULL;
    }

    return vulkanFramebuffer;
}
  6197. static void VULKAN_INTERNAL_SetCurrentViewport(
  6198. VulkanCommandBuffer *commandBuffer,
  6199. SDL_GPUViewport *viewport)
  6200. {
  6201. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6202. vulkanCommandBuffer->currentViewport.x = viewport->x;
  6203. vulkanCommandBuffer->currentViewport.width = viewport->w;
  6204. vulkanCommandBuffer->currentViewport.minDepth = viewport->minDepth;
  6205. vulkanCommandBuffer->currentViewport.maxDepth = viewport->maxDepth;
  6206. // Viewport flip for consistency with other backends
  6207. // FIXME: need moltenVK hack
  6208. vulkanCommandBuffer->currentViewport.y = viewport->y + viewport->h;
  6209. vulkanCommandBuffer->currentViewport.height = -viewport->h;
  6210. }
  6211. static void VULKAN_SetViewport(
  6212. SDL_GPUCommandBuffer *commandBuffer,
  6213. SDL_GPUViewport *viewport)
  6214. {
  6215. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6216. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6217. VULKAN_INTERNAL_SetCurrentViewport(
  6218. vulkanCommandBuffer,
  6219. viewport);
  6220. renderer->vkCmdSetViewport(
  6221. vulkanCommandBuffer->commandBuffer,
  6222. 0,
  6223. 1,
  6224. &vulkanCommandBuffer->currentViewport);
  6225. }
  6226. static void VULKAN_INTERNAL_SetCurrentScissor(
  6227. VulkanCommandBuffer *vulkanCommandBuffer,
  6228. SDL_Rect *scissor)
  6229. {
  6230. vulkanCommandBuffer->currentScissor.offset.x = scissor->x;
  6231. vulkanCommandBuffer->currentScissor.offset.y = scissor->y;
  6232. vulkanCommandBuffer->currentScissor.extent.width = scissor->w;
  6233. vulkanCommandBuffer->currentScissor.extent.height = scissor->h;
  6234. }
  6235. static void VULKAN_SetScissor(
  6236. SDL_GPUCommandBuffer *commandBuffer,
  6237. SDL_Rect *scissor)
  6238. {
  6239. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6240. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6241. VULKAN_INTERNAL_SetCurrentScissor(
  6242. vulkanCommandBuffer,
  6243. scissor);
  6244. renderer->vkCmdSetScissor(
  6245. vulkanCommandBuffer->commandBuffer,
  6246. 0,
  6247. 1,
  6248. &vulkanCommandBuffer->currentScissor);
  6249. }
  6250. static void VULKAN_BindVertexSamplers(
  6251. SDL_GPUCommandBuffer *commandBuffer,
  6252. Uint32 firstSlot,
  6253. SDL_GPUTextureSamplerBinding *textureSamplerBindings,
  6254. Uint32 bindingCount)
  6255. {
  6256. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6257. for (Uint32 i = 0; i < bindingCount; i += 1) {
  6258. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
  6259. vulkanCommandBuffer->vertexSamplerTextures[firstSlot + i] = textureContainer->activeTextureHandle->vulkanTexture;
  6260. vulkanCommandBuffer->vertexSamplers[firstSlot + i] = (VulkanSampler *)textureSamplerBindings[i].sampler;
  6261. VULKAN_INTERNAL_TrackSampler(
  6262. vulkanCommandBuffer,
  6263. (VulkanSampler *)textureSamplerBindings[i].sampler);
  6264. VULKAN_INTERNAL_TrackTexture(
  6265. vulkanCommandBuffer,
  6266. textureContainer->activeTextureHandle->vulkanTexture);
  6267. }
  6268. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6269. }
  6270. static void VULKAN_BindVertexStorageTextures(
  6271. SDL_GPUCommandBuffer *commandBuffer,
  6272. Uint32 firstSlot,
  6273. SDL_GPUTexture **storageTextures,
  6274. Uint32 bindingCount)
  6275. {
  6276. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6277. for (Uint32 i = 0; i < bindingCount; i += 1) {
  6278. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
  6279. vulkanCommandBuffer->vertexStorageTextures[firstSlot + i] = textureContainer->activeTextureHandle->vulkanTexture;
  6280. VULKAN_INTERNAL_TrackTexture(
  6281. vulkanCommandBuffer,
  6282. textureContainer->activeTextureHandle->vulkanTexture);
  6283. }
  6284. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6285. }
  6286. static void VULKAN_BindVertexStorageBuffers(
  6287. SDL_GPUCommandBuffer *commandBuffer,
  6288. Uint32 firstSlot,
  6289. SDL_GPUBuffer **storageBuffers,
  6290. Uint32 bindingCount)
  6291. {
  6292. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6293. VulkanBufferContainer *bufferContainer;
  6294. Uint32 i;
  6295. for (i = 0; i < bindingCount; i += 1) {
  6296. bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
  6297. vulkanCommandBuffer->vertexStorageBuffers[firstSlot + i] = bufferContainer->activeBufferHandle->vulkanBuffer;
  6298. VULKAN_INTERNAL_TrackBuffer(
  6299. vulkanCommandBuffer,
  6300. bufferContainer->activeBufferHandle->vulkanBuffer);
  6301. }
  6302. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6303. }
  6304. static void VULKAN_BindFragmentSamplers(
  6305. SDL_GPUCommandBuffer *commandBuffer,
  6306. Uint32 firstSlot,
  6307. SDL_GPUTextureSamplerBinding *textureSamplerBindings,
  6308. Uint32 bindingCount)
  6309. {
  6310. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6311. for (Uint32 i = 0; i < bindingCount; i += 1) {
  6312. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)textureSamplerBindings[i].texture;
  6313. vulkanCommandBuffer->fragmentSamplerTextures[firstSlot + i] = textureContainer->activeTextureHandle->vulkanTexture;
  6314. vulkanCommandBuffer->fragmentSamplers[firstSlot + i] = (VulkanSampler *)textureSamplerBindings[i].sampler;
  6315. VULKAN_INTERNAL_TrackSampler(
  6316. vulkanCommandBuffer,
  6317. (VulkanSampler *)textureSamplerBindings[i].sampler);
  6318. VULKAN_INTERNAL_TrackTexture(
  6319. vulkanCommandBuffer,
  6320. textureContainer->activeTextureHandle->vulkanTexture);
  6321. }
  6322. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6323. }
  6324. static void VULKAN_BindFragmentStorageTextures(
  6325. SDL_GPUCommandBuffer *commandBuffer,
  6326. Uint32 firstSlot,
  6327. SDL_GPUTexture **storageTextures,
  6328. Uint32 bindingCount)
  6329. {
  6330. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6331. for (Uint32 i = 0; i < bindingCount; i += 1) {
  6332. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
  6333. vulkanCommandBuffer->fragmentStorageTextures[firstSlot + i] =
  6334. textureContainer->activeTextureHandle->vulkanTexture;
  6335. VULKAN_INTERNAL_TrackTexture(
  6336. vulkanCommandBuffer,
  6337. textureContainer->activeTextureHandle->vulkanTexture);
  6338. }
  6339. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6340. }
  6341. static void VULKAN_BindFragmentStorageBuffers(
  6342. SDL_GPUCommandBuffer *commandBuffer,
  6343. Uint32 firstSlot,
  6344. SDL_GPUBuffer **storageBuffers,
  6345. Uint32 bindingCount)
  6346. {
  6347. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6348. VulkanBufferContainer *bufferContainer;
  6349. Uint32 i;
  6350. for (i = 0; i < bindingCount; i += 1) {
  6351. bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
  6352. vulkanCommandBuffer->fragmentStorageBuffers[firstSlot + i] = bufferContainer->activeBufferHandle->vulkanBuffer;
  6353. VULKAN_INTERNAL_TrackBuffer(
  6354. vulkanCommandBuffer,
  6355. bufferContainer->activeBufferHandle->vulkanBuffer);
  6356. }
  6357. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6358. }
  6359. static VulkanUniformBuffer *VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6360. VulkanCommandBuffer *commandBuffer)
  6361. {
  6362. VulkanRenderer *renderer = commandBuffer->renderer;
  6363. VulkanUniformBuffer *uniformBuffer;
  6364. SDL_LockMutex(renderer->acquireUniformBufferLock);
  6365. if (renderer->uniformBufferPoolCount > 0) {
  6366. uniformBuffer = renderer->uniformBufferPool[renderer->uniformBufferPoolCount - 1];
  6367. renderer->uniformBufferPoolCount -= 1;
  6368. } else {
  6369. uniformBuffer = VULKAN_INTERNAL_CreateUniformBuffer(
  6370. renderer,
  6371. UNIFORM_BUFFER_SIZE);
  6372. }
  6373. SDL_UnlockMutex(renderer->acquireUniformBufferLock);
  6374. VULKAN_INTERNAL_TrackUniformBuffer(commandBuffer, uniformBuffer);
  6375. return uniformBuffer;
  6376. }
  6377. static void VULKAN_INTERNAL_ReturnUniformBufferToPool(
  6378. VulkanRenderer *renderer,
  6379. VulkanUniformBuffer *uniformBuffer)
  6380. {
  6381. if (renderer->uniformBufferPoolCount >= renderer->uniformBufferPoolCapacity) {
  6382. renderer->uniformBufferPoolCapacity *= 2;
  6383. renderer->uniformBufferPool = SDL_realloc(
  6384. renderer->uniformBufferPool,
  6385. renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *));
  6386. }
  6387. renderer->uniformBufferPool[renderer->uniformBufferPoolCount] = uniformBuffer;
  6388. renderer->uniformBufferPoolCount += 1;
  6389. uniformBuffer->writeOffset = 0;
  6390. uniformBuffer->drawOffset = 0;
  6391. }
// Copies a block of uniform data into the current uniform buffer for the
// given shader stage and slot, acquiring a uniform buffer from the pool
// when the slot is empty or the current buffer is out of room. The write
// size is rounded up to the device's minimum UBO alignment so consecutive
// pushes always start on a legal dynamic offset.
static void VULKAN_INTERNAL_PushUniformData(
    VulkanCommandBuffer *commandBuffer,
    VulkanUniformBufferStage uniformBufferStage,
    Uint32 slotIndex,
    const void *data,
    Uint32 dataLengthInBytes)
{
    // Align the block so the *next* push lands on a valid UBO offset.
    Uint32 blockSize =
        VULKAN_INTERNAL_NextHighestAlignment32(
            dataLengthInBytes,
            commandBuffer->renderer->minUBOAlignment);

    VulkanUniformBuffer *uniformBuffer;

    // Select (lazily creating) the uniform buffer for this stage + slot.
    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        if (commandBuffer->vertexUniformBuffers[slotIndex] == NULL) {
            commandBuffer->vertexUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->vertexUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        if (commandBuffer->fragmentUniformBuffers[slotIndex] == NULL) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->fragmentUniformBuffers[slotIndex];
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        if (commandBuffer->computeUniformBuffers[slotIndex] == NULL) {
            commandBuffer->computeUniformBuffers[slotIndex] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
                commandBuffer);
        }
        uniformBuffer = commandBuffer->computeUniformBuffers[slotIndex];
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }

    // If there is no more room, acquire a new uniform buffer
    // (MAX_UBO_SECTION_SIZE headroom guarantees the bound UBO range never
    // extends past the end of the buffer).
    if (uniformBuffer->writeOffset + blockSize + MAX_UBO_SECTION_SIZE >= uniformBuffer->bufferHandle->vulkanBuffer->size) {
        uniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(commandBuffer);

        uniformBuffer->drawOffset = 0;
        uniformBuffer->writeOffset = 0;

        // Swapping the backing buffer invalidates the stage's uniform
        // descriptor set, not just its dynamic offsets.
        if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
            commandBuffer->vertexUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewVertexUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
            commandBuffer->fragmentUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewFragmentUniformDescriptorSet = true;
        } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
            commandBuffer->computeUniformBuffers[slotIndex] = uniformBuffer;
            commandBuffer->needNewComputeUniformDescriptorSet = true;
        } else {
            SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
            return;
        }
    }

    // The next draw reads from where this push begins.
    uniformBuffer->drawOffset = uniformBuffer->writeOffset;

    // Write through the persistently mapped allocation.
    Uint8 *dst =
        uniformBuffer->bufferHandle->vulkanBuffer->usedRegion->allocation->mapPointer +
        uniformBuffer->bufferHandle->vulkanBuffer->usedRegion->resourceOffset +
        uniformBuffer->writeOffset;

    SDL_memcpy(
        dst,
        data,
        dataLengthInBytes);

    uniformBuffer->writeOffset += blockSize;

    // Only the dynamic offsets need rebinding; the descriptor set is reused.
    if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_VERTEX) {
        commandBuffer->needNewVertexUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT) {
        commandBuffer->needNewFragmentUniformOffsets = true;
    } else if (uniformBufferStage == VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE) {
        commandBuffer->needNewComputeUniformOffsets = true;
    } else {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Unrecognized shader stage!");
        return;
    }
}
  6466. static void VULKAN_BeginRenderPass(
  6467. SDL_GPUCommandBuffer *commandBuffer,
  6468. SDL_GPUColorAttachmentInfo *colorAttachmentInfos,
  6469. Uint32 colorAttachmentCount,
  6470. SDL_GPUDepthStencilAttachmentInfo *depthStencilAttachmentInfo)
  6471. {
  6472. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6473. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6474. VkRenderPass renderPass;
  6475. VulkanFramebuffer *framebuffer;
  6476. Uint32 w, h;
  6477. VkClearValue *clearValues;
  6478. Uint32 clearCount = colorAttachmentCount;
  6479. Uint32 multisampleAttachmentCount = 0;
  6480. Uint32 totalColorAttachmentCount = 0;
  6481. Uint32 i;
  6482. SDL_GPUViewport defaultViewport;
  6483. SDL_Rect defaultScissor;
  6484. Uint32 framebufferWidth = UINT32_MAX;
  6485. Uint32 framebufferHeight = UINT32_MAX;
  6486. for (i = 0; i < colorAttachmentCount; i += 1) {
  6487. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorAttachmentInfos[i].texture;
  6488. w = textureContainer->activeTextureHandle->vulkanTexture->dimensions.width >> colorAttachmentInfos[i].mipLevel;
  6489. h = textureContainer->activeTextureHandle->vulkanTexture->dimensions.height >> colorAttachmentInfos[i].mipLevel;
  6490. // The framebuffer cannot be larger than the smallest attachment.
  6491. if (w < framebufferWidth) {
  6492. framebufferWidth = w;
  6493. }
  6494. if (h < framebufferHeight) {
  6495. framebufferHeight = h;
  6496. }
  6497. // FIXME: validate this in gpu.c
  6498. if (!(textureContainer->header.info.usageFlags & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT)) {
  6499. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Color attachment texture was not designated as a target!");
  6500. return;
  6501. }
  6502. }
  6503. if (depthStencilAttachmentInfo != NULL) {
  6504. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilAttachmentInfo->texture;
  6505. w = textureContainer->activeTextureHandle->vulkanTexture->dimensions.width;
  6506. h = textureContainer->activeTextureHandle->vulkanTexture->dimensions.height;
  6507. // The framebuffer cannot be larger than the smallest attachment.
  6508. if (w < framebufferWidth) {
  6509. framebufferWidth = w;
  6510. }
  6511. if (h < framebufferHeight) {
  6512. framebufferHeight = h;
  6513. }
  6514. // FIXME: validate this in gpu.c
  6515. if (!(textureContainer->header.info.usageFlags & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT)) {
  6516. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Depth stencil attachment texture was not designated as a target!");
  6517. return;
  6518. }
  6519. }
  6520. for (i = 0; i < colorAttachmentCount; i += 1) {
  6521. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)colorAttachmentInfos[i].texture;
  6522. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
  6523. renderer,
  6524. vulkanCommandBuffer,
  6525. textureContainer,
  6526. textureContainer->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorAttachmentInfos[i].layerOrDepthPlane,
  6527. colorAttachmentInfos[i].mipLevel,
  6528. colorAttachmentInfos[i].cycle,
  6529. VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT);
  6530. if (subresource->msaaTexHandle != NULL) {
  6531. // Transition the multisample attachment
  6532. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  6533. renderer,
  6534. vulkanCommandBuffer,
  6535. VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
  6536. &subresource->msaaTexHandle->vulkanTexture->subresources[0]);
  6537. clearCount += 1;
  6538. multisampleAttachmentCount += 1;
  6539. }
  6540. vulkanCommandBuffer->colorAttachmentSubresources[i] = subresource;
  6541. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);
  6542. // TODO: do we need to track the msaa texture? or is it implicitly only used when the regular texture is used?
  6543. }
  6544. vulkanCommandBuffer->colorAttachmentSubresourceCount = colorAttachmentCount;
  6545. if (depthStencilAttachmentInfo != NULL) {
  6546. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)depthStencilAttachmentInfo->texture;
  6547. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
  6548. renderer,
  6549. vulkanCommandBuffer,
  6550. textureContainer,
  6551. 0,
  6552. 0,
  6553. depthStencilAttachmentInfo->cycle,
  6554. VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT);
  6555. clearCount += 1;
  6556. vulkanCommandBuffer->depthStencilAttachmentSubresource = subresource;
  6557. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, subresource->parent);
  6558. }
  6559. // Fetch required render objects
  6560. renderPass = VULKAN_INTERNAL_FetchRenderPass(
  6561. renderer,
  6562. vulkanCommandBuffer,
  6563. colorAttachmentInfos,
  6564. colorAttachmentCount,
  6565. depthStencilAttachmentInfo);
  6566. framebuffer = VULKAN_INTERNAL_FetchFramebuffer(
  6567. renderer,
  6568. renderPass,
  6569. colorAttachmentInfos,
  6570. colorAttachmentCount,
  6571. depthStencilAttachmentInfo,
  6572. framebufferWidth,
  6573. framebufferHeight);
  6574. VULKAN_INTERNAL_TrackFramebuffer(renderer, vulkanCommandBuffer, framebuffer);
  6575. // Set clear values
  6576. clearValues = SDL_stack_alloc(VkClearValue, clearCount);
  6577. totalColorAttachmentCount = colorAttachmentCount + multisampleAttachmentCount;
  6578. for (i = 0; i < totalColorAttachmentCount; i += 1) {
  6579. clearValues[i].color.float32[0] = colorAttachmentInfos[i].clearColor.r;
  6580. clearValues[i].color.float32[1] = colorAttachmentInfos[i].clearColor.g;
  6581. clearValues[i].color.float32[2] = colorAttachmentInfos[i].clearColor.b;
  6582. clearValues[i].color.float32[3] = colorAttachmentInfos[i].clearColor.a;
  6583. VulkanTextureContainer *container = (VulkanTextureContainer *)colorAttachmentInfos[i].texture;
  6584. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_FetchTextureSubresource(
  6585. container,
  6586. container->header.info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : colorAttachmentInfos[i].layerOrDepthPlane,
  6587. colorAttachmentInfos[i].mipLevel);
  6588. if (subresource->parent->sampleCount > VK_SAMPLE_COUNT_1_BIT) {
  6589. clearValues[i + 1].color.float32[0] = colorAttachmentInfos[i].clearColor.r;
  6590. clearValues[i + 1].color.float32[1] = colorAttachmentInfos[i].clearColor.g;
  6591. clearValues[i + 1].color.float32[2] = colorAttachmentInfos[i].clearColor.b;
  6592. clearValues[i + 1].color.float32[3] = colorAttachmentInfos[i].clearColor.a;
  6593. i += 1;
  6594. }
  6595. }
  6596. if (depthStencilAttachmentInfo != NULL) {
  6597. clearValues[totalColorAttachmentCount].depthStencil.depth =
  6598. depthStencilAttachmentInfo->depthStencilClearValue.depth;
  6599. clearValues[totalColorAttachmentCount].depthStencil.stencil =
  6600. depthStencilAttachmentInfo->depthStencilClearValue.stencil;
  6601. }
  6602. VkRenderPassBeginInfo renderPassBeginInfo;
  6603. renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  6604. renderPassBeginInfo.pNext = NULL;
  6605. renderPassBeginInfo.renderPass = renderPass;
  6606. renderPassBeginInfo.framebuffer = framebuffer->framebuffer;
  6607. renderPassBeginInfo.pClearValues = clearValues;
  6608. renderPassBeginInfo.clearValueCount = clearCount;
  6609. renderPassBeginInfo.renderArea.extent.width = framebufferWidth;
  6610. renderPassBeginInfo.renderArea.extent.height = framebufferHeight;
  6611. renderPassBeginInfo.renderArea.offset.x = 0;
  6612. renderPassBeginInfo.renderArea.offset.y = 0;
  6613. renderer->vkCmdBeginRenderPass(
  6614. vulkanCommandBuffer->commandBuffer,
  6615. &renderPassBeginInfo,
  6616. VK_SUBPASS_CONTENTS_INLINE);
  6617. SDL_stack_free(clearValues);
  6618. // Set sensible default viewport state
  6619. defaultViewport.x = 0;
  6620. defaultViewport.y = 0;
  6621. defaultViewport.w = (float)framebufferWidth;
  6622. defaultViewport.h = (float)framebufferHeight;
  6623. defaultViewport.minDepth = 0;
  6624. defaultViewport.maxDepth = 1;
  6625. VULKAN_INTERNAL_SetCurrentViewport(
  6626. vulkanCommandBuffer,
  6627. &defaultViewport);
  6628. defaultScissor.x = 0;
  6629. defaultScissor.y = 0;
  6630. defaultScissor.w = (Sint32)framebufferWidth;
  6631. defaultScissor.h = (Sint32)framebufferHeight;
  6632. VULKAN_INTERNAL_SetCurrentScissor(
  6633. vulkanCommandBuffer,
  6634. &defaultScissor);
  6635. }
  6636. static void VULKAN_BindGraphicsPipeline(
  6637. SDL_GPUCommandBuffer *commandBuffer,
  6638. SDL_GPUGraphicsPipeline *graphicsPipeline)
  6639. {
  6640. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6641. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6642. VulkanGraphicsPipeline *pipeline = (VulkanGraphicsPipeline *)graphicsPipeline;
  6643. renderer->vkCmdBindPipeline(
  6644. vulkanCommandBuffer->commandBuffer,
  6645. VK_PIPELINE_BIND_POINT_GRAPHICS,
  6646. pipeline->pipeline);
  6647. vulkanCommandBuffer->currentGraphicsPipeline = pipeline;
  6648. VULKAN_INTERNAL_TrackGraphicsPipeline(vulkanCommandBuffer, pipeline);
  6649. renderer->vkCmdSetViewport(
  6650. vulkanCommandBuffer->commandBuffer,
  6651. 0,
  6652. 1,
  6653. &vulkanCommandBuffer->currentViewport);
  6654. renderer->vkCmdSetScissor(
  6655. vulkanCommandBuffer->commandBuffer,
  6656. 0,
  6657. 1,
  6658. &vulkanCommandBuffer->currentScissor);
  6659. // Acquire uniform buffers if necessary
  6660. for (Uint32 i = 0; i < pipeline->resourceLayout.vertexUniformBufferCount; i += 1) {
  6661. if (vulkanCommandBuffer->vertexUniformBuffers[i] == NULL) {
  6662. vulkanCommandBuffer->vertexUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6663. vulkanCommandBuffer);
  6664. }
  6665. }
  6666. for (Uint32 i = 0; i < pipeline->resourceLayout.fragmentUniformBufferCount; i += 1) {
  6667. if (vulkanCommandBuffer->fragmentUniformBuffers[i] == NULL) {
  6668. vulkanCommandBuffer->fragmentUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6669. vulkanCommandBuffer);
  6670. }
  6671. }
  6672. // Mark bindings as needed
  6673. vulkanCommandBuffer->needNewVertexResourceDescriptorSet = true;
  6674. vulkanCommandBuffer->needNewFragmentResourceDescriptorSet = true;
  6675. vulkanCommandBuffer->needNewVertexUniformDescriptorSet = true;
  6676. vulkanCommandBuffer->needNewFragmentUniformDescriptorSet = true;
  6677. vulkanCommandBuffer->needNewVertexUniformOffsets = true;
  6678. vulkanCommandBuffer->needNewFragmentUniformOffsets = true;
  6679. }
  6680. static void VULKAN_BindVertexBuffers(
  6681. SDL_GPUCommandBuffer *commandBuffer,
  6682. Uint32 firstBinding,
  6683. SDL_GPUBufferBinding *pBindings,
  6684. Uint32 bindingCount)
  6685. {
  6686. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6687. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6688. VulkanBuffer *currentVulkanBuffer;
  6689. VkBuffer *buffers = SDL_stack_alloc(VkBuffer, bindingCount);
  6690. VkDeviceSize *offsets = SDL_stack_alloc(VkDeviceSize, bindingCount);
  6691. Uint32 i;
  6692. for (i = 0; i < bindingCount; i += 1) {
  6693. currentVulkanBuffer = ((VulkanBufferContainer *)pBindings[i].buffer)->activeBufferHandle->vulkanBuffer;
  6694. buffers[i] = currentVulkanBuffer->buffer;
  6695. offsets[i] = (VkDeviceSize)pBindings[i].offset;
  6696. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, currentVulkanBuffer);
  6697. }
  6698. renderer->vkCmdBindVertexBuffers(
  6699. vulkanCommandBuffer->commandBuffer,
  6700. firstBinding,
  6701. bindingCount,
  6702. buffers,
  6703. offsets);
  6704. SDL_stack_free(buffers);
  6705. SDL_stack_free(offsets);
  6706. }
  6707. static void VULKAN_BindIndexBuffer(
  6708. SDL_GPUCommandBuffer *commandBuffer,
  6709. SDL_GPUBufferBinding *pBinding,
  6710. SDL_GPUIndexElementSize indexElementSize)
  6711. {
  6712. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6713. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6714. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)pBinding->buffer)->activeBufferHandle->vulkanBuffer;
  6715. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  6716. renderer->vkCmdBindIndexBuffer(
  6717. vulkanCommandBuffer->commandBuffer,
  6718. vulkanBuffer->buffer,
  6719. (VkDeviceSize)pBinding->offset,
  6720. SDLToVK_IndexType[indexElementSize]);
  6721. }
  6722. static void VULKAN_PushVertexUniformData(
  6723. SDL_GPUCommandBuffer *commandBuffer,
  6724. Uint32 slotIndex,
  6725. const void *data,
  6726. Uint32 dataLengthInBytes)
  6727. {
  6728. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6729. VULKAN_INTERNAL_PushUniformData(
  6730. vulkanCommandBuffer,
  6731. VULKAN_UNIFORM_BUFFER_STAGE_VERTEX,
  6732. slotIndex,
  6733. data,
  6734. dataLengthInBytes);
  6735. }
  6736. static void VULKAN_PushFragmentUniformData(
  6737. SDL_GPUCommandBuffer *commandBuffer,
  6738. Uint32 slotIndex,
  6739. const void *data,
  6740. Uint32 dataLengthInBytes)
  6741. {
  6742. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6743. VULKAN_INTERNAL_PushUniformData(
  6744. vulkanCommandBuffer,
  6745. VULKAN_UNIFORM_BUFFER_STAGE_FRAGMENT,
  6746. slotIndex,
  6747. data,
  6748. dataLengthInBytes);
  6749. }
  6750. static void VULKAN_EndRenderPass(
  6751. SDL_GPUCommandBuffer *commandBuffer)
  6752. {
  6753. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6754. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6755. Uint32 i;
  6756. renderer->vkCmdEndRenderPass(
  6757. vulkanCommandBuffer->commandBuffer);
  6758. for (i = 0; i < vulkanCommandBuffer->colorAttachmentSubresourceCount; i += 1) {
  6759. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  6760. renderer,
  6761. vulkanCommandBuffer,
  6762. VULKAN_TEXTURE_USAGE_MODE_COLOR_ATTACHMENT,
  6763. vulkanCommandBuffer->colorAttachmentSubresources[i]);
  6764. }
  6765. vulkanCommandBuffer->colorAttachmentSubresourceCount = 0;
  6766. if (vulkanCommandBuffer->depthStencilAttachmentSubresource != NULL) {
  6767. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  6768. renderer,
  6769. vulkanCommandBuffer,
  6770. VULKAN_TEXTURE_USAGE_MODE_DEPTH_STENCIL_ATTACHMENT,
  6771. vulkanCommandBuffer->depthStencilAttachmentSubresource);
  6772. vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;
  6773. }
  6774. vulkanCommandBuffer->currentGraphicsPipeline = NULL;
  6775. vulkanCommandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
  6776. vulkanCommandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
  6777. vulkanCommandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
  6778. vulkanCommandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;
  6779. // Reset bind state
  6780. SDL_zeroa(vulkanCommandBuffer->colorAttachmentSubresources);
  6781. vulkanCommandBuffer->depthStencilAttachmentSubresource = NULL;
  6782. SDL_zeroa(vulkanCommandBuffer->vertexSamplers);
  6783. SDL_zeroa(vulkanCommandBuffer->vertexSamplerTextures);
  6784. SDL_zeroa(vulkanCommandBuffer->vertexStorageTextures);
  6785. SDL_zeroa(vulkanCommandBuffer->vertexStorageBuffers);
  6786. SDL_zeroa(vulkanCommandBuffer->fragmentSamplers);
  6787. SDL_zeroa(vulkanCommandBuffer->fragmentSamplerTextures);
  6788. SDL_zeroa(vulkanCommandBuffer->fragmentStorageTextures);
  6789. SDL_zeroa(vulkanCommandBuffer->fragmentStorageBuffers);
  6790. }
  6791. static void VULKAN_BeginComputePass(
  6792. SDL_GPUCommandBuffer *commandBuffer,
  6793. SDL_GPUStorageTextureWriteOnlyBinding *storageTextureBindings,
  6794. Uint32 storageTextureBindingCount,
  6795. SDL_GPUStorageBufferWriteOnlyBinding *storageBufferBindings,
  6796. Uint32 storageBufferBindingCount)
  6797. {
  6798. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6799. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6800. VulkanBufferContainer *bufferContainer;
  6801. VulkanBuffer *buffer;
  6802. Uint32 i;
  6803. vulkanCommandBuffer->writeOnlyComputeStorageTextureSubresourceCount = storageTextureBindingCount;
  6804. for (i = 0; i < storageTextureBindingCount; i += 1) {
  6805. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextureBindings[i].texture;
  6806. if (!(textureContainer->activeTextureHandle->vulkanTexture->usageFlags & SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE_BIT)) {
  6807. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Attempted to bind read-only texture as compute write texture");
  6808. }
  6809. VulkanTextureSubresource *subresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
  6810. renderer,
  6811. vulkanCommandBuffer,
  6812. textureContainer,
  6813. storageTextureBindings[i].layer,
  6814. storageTextureBindings[i].mipLevel,
  6815. storageTextureBindings[i].cycle,
  6816. VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE);
  6817. vulkanCommandBuffer->writeOnlyComputeStorageTextureSubresources[i] = subresource;
  6818. VULKAN_INTERNAL_TrackTexture(
  6819. vulkanCommandBuffer,
  6820. subresource->parent);
  6821. }
  6822. for (i = 0; i < storageBufferBindingCount; i += 1) {
  6823. bufferContainer = (VulkanBufferContainer *)storageBufferBindings[i].buffer;
  6824. buffer = VULKAN_INTERNAL_PrepareBufferForWrite(
  6825. renderer,
  6826. vulkanCommandBuffer,
  6827. bufferContainer,
  6828. storageBufferBindings[i].cycle,
  6829. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ);
  6830. vulkanCommandBuffer->writeOnlyComputeStorageBuffers[i] = buffer;
  6831. VULKAN_INTERNAL_TrackBuffer(
  6832. vulkanCommandBuffer,
  6833. buffer);
  6834. }
  6835. }
  6836. static void VULKAN_BindComputePipeline(
  6837. SDL_GPUCommandBuffer *commandBuffer,
  6838. SDL_GPUComputePipeline *computePipeline)
  6839. {
  6840. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6841. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  6842. VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline *)computePipeline;
  6843. renderer->vkCmdBindPipeline(
  6844. vulkanCommandBuffer->commandBuffer,
  6845. VK_PIPELINE_BIND_POINT_COMPUTE,
  6846. vulkanComputePipeline->pipeline);
  6847. vulkanCommandBuffer->currentComputePipeline = vulkanComputePipeline;
  6848. VULKAN_INTERNAL_TrackComputePipeline(vulkanCommandBuffer, vulkanComputePipeline);
  6849. // Acquire uniform buffers if necessary
  6850. for (Uint32 i = 0; i < vulkanComputePipeline->resourceLayout.uniformBufferCount; i += 1) {
  6851. if (vulkanCommandBuffer->computeUniformBuffers[i] == NULL) {
  6852. vulkanCommandBuffer->computeUniformBuffers[i] = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
  6853. vulkanCommandBuffer);
  6854. }
  6855. }
  6856. // Mark binding as needed
  6857. vulkanCommandBuffer->needNewComputeWriteOnlyDescriptorSet = true;
  6858. vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  6859. vulkanCommandBuffer->needNewComputeUniformDescriptorSet = true;
  6860. vulkanCommandBuffer->needNewComputeUniformOffsets = true;
  6861. }
  6862. static void VULKAN_BindComputeStorageTextures(
  6863. SDL_GPUCommandBuffer *commandBuffer,
  6864. Uint32 firstSlot,
  6865. SDL_GPUTexture **storageTextures,
  6866. Uint32 bindingCount)
  6867. {
  6868. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6869. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6870. for (Uint32 i = 0; i < bindingCount; i += 1) {
  6871. if (vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] != NULL) {
  6872. VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
  6873. renderer,
  6874. vulkanCommandBuffer,
  6875. VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
  6876. vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i]);
  6877. }
  6878. VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)storageTextures[i];
  6879. vulkanCommandBuffer->readOnlyComputeStorageTextures[firstSlot + i] =
  6880. textureContainer->activeTextureHandle->vulkanTexture;
  6881. VULKAN_INTERNAL_TextureTransitionFromDefaultUsage(
  6882. renderer,
  6883. vulkanCommandBuffer,
  6884. VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
  6885. textureContainer->activeTextureHandle->vulkanTexture);
  6886. VULKAN_INTERNAL_TrackTexture(
  6887. vulkanCommandBuffer,
  6888. textureContainer->activeTextureHandle->vulkanTexture);
  6889. }
  6890. vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  6891. }
  6892. static void VULKAN_BindComputeStorageBuffers(
  6893. SDL_GPUCommandBuffer *commandBuffer,
  6894. Uint32 firstSlot,
  6895. SDL_GPUBuffer **storageBuffers,
  6896. Uint32 bindingCount)
  6897. {
  6898. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6899. VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
  6900. VulkanBufferContainer *bufferContainer;
  6901. Uint32 i;
  6902. for (i = 0; i < bindingCount; i += 1) {
  6903. if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] != NULL) {
  6904. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  6905. renderer,
  6906. vulkanCommandBuffer,
  6907. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
  6908. vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i]);
  6909. }
  6910. bufferContainer = (VulkanBufferContainer *)storageBuffers[i];
  6911. vulkanCommandBuffer->readOnlyComputeStorageBuffers[firstSlot + i] = bufferContainer->activeBufferHandle->vulkanBuffer;
  6912. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  6913. renderer,
  6914. vulkanCommandBuffer,
  6915. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
  6916. bufferContainer->activeBufferHandle->vulkanBuffer);
  6917. VULKAN_INTERNAL_TrackBuffer(
  6918. vulkanCommandBuffer,
  6919. bufferContainer->activeBufferHandle->vulkanBuffer);
  6920. }
  6921. vulkanCommandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  6922. }
  6923. static void VULKAN_PushComputeUniformData(
  6924. SDL_GPUCommandBuffer *commandBuffer,
  6925. Uint32 slotIndex,
  6926. const void *data,
  6927. Uint32 dataLengthInBytes)
  6928. {
  6929. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  6930. VULKAN_INTERNAL_PushUniformData(
  6931. vulkanCommandBuffer,
  6932. VULKAN_UNIFORM_BUFFER_STAGE_COMPUTE,
  6933. slotIndex,
  6934. data,
  6935. dataLengthInBytes);
  6936. }
// Flushes pending compute descriptor state before a dispatch.
//
// The compute pipeline layout uses three descriptor sets:
//   set 0: read-only storage textures, then read-only storage buffers
//   set 1: write-only storage textures, then write-only storage buffers
//   set 2: dynamic uniform buffers
// Each set is only re-fetched and rewritten when its needNew* flag was
// raised (by a pipeline bind or a resource rebind). Uniform dynamic
// offsets can be rebound without rewriting set 2, which is why
// needNewComputeUniformOffsets is tracked separately.
static void VULKAN_INTERNAL_BindComputeDescriptorSets(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer)
{
    VulkanComputePipelineResourceLayout *resourceLayout;
    VkWriteDescriptorSet *writeDescriptorSets;
    VkWriteDescriptorSet *currentWriteDescriptorSet;
    DescriptorSetPool *descriptorSetPool;
    VkDescriptorBufferInfo bufferInfos[MAX_STORAGE_BUFFERS_PER_STAGE]; // 8 is max for both read and write
    VkDescriptorImageInfo imageInfos[MAX_STORAGE_TEXTURES_PER_STAGE];  // 8 is max for both read and write
    Uint32 dynamicOffsets[MAX_UNIFORM_BUFFERS_PER_STAGE];
    Uint32 bufferInfoCount = 0;
    Uint32 imageInfoCount = 0;
    Uint32 i;

    resourceLayout = &commandBuffer->currentComputePipeline->resourceLayout;

    if (commandBuffer->needNewComputeReadOnlyDescriptorSet) {
        // Set 0: read-only storage resources
        descriptorSetPool = &resourceLayout->descriptorSetPools[0];

        commandBuffer->computeReadOnlyDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetPool);

        writeDescriptorSets = SDL_stack_alloc(
            VkWriteDescriptorSet,
            resourceLayout->readOnlyStorageTextureCount +
                resourceLayout->readOnlyStorageBufferCount);

        // Textures occupy bindings [0, readOnlyStorageTextureCount)
        for (i = 0; i < resourceLayout->readOnlyStorageTextureCount; i += 1) {
            currentWriteDescriptorSet = &writeDescriptorSets[i];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            // Storage images are accessed through the full view in GENERAL layout
            imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
            imageInfos[imageInfoCount].imageView = commandBuffer->readOnlyComputeStorageTextures[i]->fullView;
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            imageInfoCount += 1;
        }

        // Buffers occupy bindings starting after the textures
        for (i = 0; i < resourceLayout->readOnlyStorageBufferCount; i += 1) {
            currentWriteDescriptorSet = &writeDescriptorSets[resourceLayout->readOnlyStorageTextureCount + i];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->readOnlyStorageTextureCount + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeReadOnlyDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->readOnlyComputeStorageBuffers[i]->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            bufferInfoCount += 1;
        }

        renderer->vkUpdateDescriptorSets(
            renderer->logicalDevice,
            resourceLayout->readOnlyStorageTextureCount + resourceLayout->readOnlyStorageBufferCount,
            writeDescriptorSets,
            0,
            NULL);

        renderer->vkCmdBindDescriptorSets(
            commandBuffer->commandBuffer,
            VK_PIPELINE_BIND_POINT_COMPUTE,
            resourceLayout->pipelineLayout,
            0,
            1,
            &commandBuffer->computeReadOnlyDescriptorSet,
            0,
            NULL);

        SDL_stack_free(writeDescriptorSets);
        // Reset scratch counters for the next descriptor set
        bufferInfoCount = 0;
        imageInfoCount = 0;

        commandBuffer->needNewComputeReadOnlyDescriptorSet = false;
    }

    if (commandBuffer->needNewComputeWriteOnlyDescriptorSet) {
        // Set 1: write-only storage resources
        descriptorSetPool = &resourceLayout->descriptorSetPools[1];

        commandBuffer->computeWriteOnlyDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetPool);

        writeDescriptorSets = SDL_stack_alloc(
            VkWriteDescriptorSet,
            resourceLayout->writeOnlyStorageTextureCount +
                resourceLayout->writeOnlyStorageBufferCount);

        // Textures occupy bindings [0, writeOnlyStorageTextureCount)
        for (i = 0; i < resourceLayout->writeOnlyStorageTextureCount; i += 1) {
            currentWriteDescriptorSet = &writeDescriptorSets[i];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeWriteOnlyDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pBufferInfo = NULL;

            // Write targets use the per-subresource compute write view
            imageInfos[imageInfoCount].sampler = VK_NULL_HANDLE;
            imageInfos[imageInfoCount].imageView = commandBuffer->writeOnlyComputeStorageTextureSubresources[i]->computeWriteView;
            imageInfos[imageInfoCount].imageLayout = VK_IMAGE_LAYOUT_GENERAL;

            currentWriteDescriptorSet->pImageInfo = &imageInfos[imageInfoCount];

            imageInfoCount += 1;
        }

        // Buffers occupy bindings starting after the textures
        for (i = 0; i < resourceLayout->writeOnlyStorageBufferCount; i += 1) {
            currentWriteDescriptorSet = &writeDescriptorSets[resourceLayout->writeOnlyStorageTextureCount + i];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = resourceLayout->writeOnlyStorageTextureCount + i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeWriteOnlyDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            bufferInfos[bufferInfoCount].buffer = commandBuffer->writeOnlyComputeStorageBuffers[i]->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = VK_WHOLE_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            bufferInfoCount += 1;
        }

        renderer->vkUpdateDescriptorSets(
            renderer->logicalDevice,
            resourceLayout->writeOnlyStorageTextureCount + resourceLayout->writeOnlyStorageBufferCount,
            writeDescriptorSets,
            0,
            NULL);

        renderer->vkCmdBindDescriptorSets(
            commandBuffer->commandBuffer,
            VK_PIPELINE_BIND_POINT_COMPUTE,
            resourceLayout->pipelineLayout,
            1,
            1,
            &commandBuffer->computeWriteOnlyDescriptorSet,
            0,
            NULL);

        SDL_stack_free(writeDescriptorSets);
        // Reset scratch counters for the next descriptor set
        bufferInfoCount = 0;
        imageInfoCount = 0;

        commandBuffer->needNewComputeWriteOnlyDescriptorSet = false;
    }

    if (commandBuffer->needNewComputeUniformDescriptorSet) {
        // Set 2: dynamic uniform buffers
        descriptorSetPool = &resourceLayout->descriptorSetPools[2];

        commandBuffer->computeUniformDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
            renderer,
            commandBuffer,
            descriptorSetPool);

        writeDescriptorSets = SDL_stack_alloc(
            VkWriteDescriptorSet,
            resourceLayout->uniformBufferCount);

        for (i = 0; i < resourceLayout->uniformBufferCount; i += 1) {
            currentWriteDescriptorSet = &writeDescriptorSets[i];
            currentWriteDescriptorSet->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
            currentWriteDescriptorSet->pNext = NULL;
            currentWriteDescriptorSet->descriptorCount = 1;
            currentWriteDescriptorSet->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
            currentWriteDescriptorSet->dstArrayElement = 0;
            currentWriteDescriptorSet->dstBinding = i;
            currentWriteDescriptorSet->dstSet = commandBuffer->computeUniformDescriptorSet;
            currentWriteDescriptorSet->pTexelBufferView = NULL;
            currentWriteDescriptorSet->pImageInfo = NULL;

            // The dynamic offset (bound below) selects a section inside this buffer
            bufferInfos[bufferInfoCount].buffer = commandBuffer->computeUniformBuffers[i]->bufferHandle->vulkanBuffer->buffer;
            bufferInfos[bufferInfoCount].offset = 0;
            bufferInfos[bufferInfoCount].range = MAX_UBO_SECTION_SIZE;

            currentWriteDescriptorSet->pBufferInfo = &bufferInfos[bufferInfoCount];

            bufferInfoCount += 1;
        }

        renderer->vkUpdateDescriptorSets(
            renderer->logicalDevice,
            resourceLayout->uniformBufferCount,
            writeDescriptorSets,
            0,
            NULL);

        SDL_stack_free(writeDescriptorSets);
        bufferInfoCount = 0;
        imageInfoCount = 0;

        commandBuffer->needNewComputeUniformDescriptorSet = false;
        // A freshly written set must also be (re)bound with current offsets
        commandBuffer->needNewComputeUniformOffsets = true;
    }

    if (commandBuffer->needNewComputeUniformOffsets) {
        // Bind set 2 with the current draw offsets of each uniform buffer
        for (i = 0; i < resourceLayout->uniformBufferCount; i += 1) {
            dynamicOffsets[i] = commandBuffer->computeUniformBuffers[i]->drawOffset;
        }

        renderer->vkCmdBindDescriptorSets(
            commandBuffer->commandBuffer,
            VK_PIPELINE_BIND_POINT_COMPUTE,
            resourceLayout->pipelineLayout,
            2,
            1,
            &commandBuffer->computeUniformDescriptorSet,
            resourceLayout->uniformBufferCount,
            dynamicOffsets);

        commandBuffer->needNewComputeUniformOffsets = false;
    }
}
  7134. static void VULKAN_DispatchCompute(
  7135. SDL_GPUCommandBuffer *commandBuffer,
  7136. Uint32 groupCountX,
  7137. Uint32 groupCountY,
  7138. Uint32 groupCountZ)
  7139. {
  7140. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7141. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  7142. VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer);
  7143. renderer->vkCmdDispatch(
  7144. vulkanCommandBuffer->commandBuffer,
  7145. groupCountX,
  7146. groupCountY,
  7147. groupCountZ);
  7148. }
  7149. static void VULKAN_DispatchComputeIndirect(
  7150. SDL_GPUCommandBuffer *commandBuffer,
  7151. SDL_GPUBuffer *buffer,
  7152. Uint32 offsetInBytes)
  7153. {
  7154. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7155. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  7156. VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer *)buffer)->activeBufferHandle->vulkanBuffer;
  7157. VULKAN_INTERNAL_BindComputeDescriptorSets(renderer, vulkanCommandBuffer);
  7158. renderer->vkCmdDispatchIndirect(
  7159. vulkanCommandBuffer->commandBuffer,
  7160. vulkanBuffer->buffer,
  7161. offsetInBytes);
  7162. VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
  7163. }
  7164. static void VULKAN_EndComputePass(
  7165. SDL_GPUCommandBuffer *commandBuffer)
  7166. {
  7167. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7168. Uint32 i;
  7169. for (i = 0; i < vulkanCommandBuffer->writeOnlyComputeStorageTextureSubresourceCount; i += 1) {
  7170. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7171. vulkanCommandBuffer->renderer,
  7172. vulkanCommandBuffer,
  7173. VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
  7174. vulkanCommandBuffer->writeOnlyComputeStorageTextureSubresources[i]);
  7175. vulkanCommandBuffer->writeOnlyComputeStorageTextureSubresources[i] = NULL;
  7176. }
  7177. vulkanCommandBuffer->writeOnlyComputeStorageTextureSubresourceCount = 0;
  7178. for (i = 0; i < MAX_COMPUTE_WRITE_BUFFERS; i += 1) {
  7179. if (vulkanCommandBuffer->writeOnlyComputeStorageBuffers[i] != NULL) {
  7180. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7181. vulkanCommandBuffer->renderer,
  7182. vulkanCommandBuffer,
  7183. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ_WRITE,
  7184. vulkanCommandBuffer->writeOnlyComputeStorageBuffers[i]);
  7185. vulkanCommandBuffer->writeOnlyComputeStorageBuffers[i] = NULL;
  7186. }
  7187. }
  7188. for (i = 0; i < MAX_STORAGE_TEXTURES_PER_STAGE; i += 1) {
  7189. if (vulkanCommandBuffer->readOnlyComputeStorageTextures[i] != NULL) {
  7190. VULKAN_INTERNAL_TextureTransitionToDefaultUsage(
  7191. vulkanCommandBuffer->renderer,
  7192. vulkanCommandBuffer,
  7193. VULKAN_TEXTURE_USAGE_MODE_COMPUTE_STORAGE_READ,
  7194. vulkanCommandBuffer->readOnlyComputeStorageTextures[i]);
  7195. vulkanCommandBuffer->readOnlyComputeStorageTextures[i] = NULL;
  7196. }
  7197. }
  7198. for (i = 0; i < MAX_STORAGE_BUFFERS_PER_STAGE; i += 1) {
  7199. if (vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] != NULL) {
  7200. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  7201. vulkanCommandBuffer->renderer,
  7202. vulkanCommandBuffer,
  7203. VULKAN_BUFFER_USAGE_MODE_COMPUTE_STORAGE_READ,
  7204. vulkanCommandBuffer->readOnlyComputeStorageBuffers[i]);
  7205. vulkanCommandBuffer->readOnlyComputeStorageBuffers[i] = NULL;
  7206. }
  7207. }
  7208. vulkanCommandBuffer->currentComputePipeline = NULL;
  7209. vulkanCommandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
  7210. vulkanCommandBuffer->computeWriteOnlyDescriptorSet = VK_NULL_HANDLE;
  7211. vulkanCommandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
  7212. }
  7213. static void *VULKAN_MapTransferBuffer(
  7214. SDL_GPURenderer *driverData,
  7215. SDL_GPUTransferBuffer *transferBuffer,
  7216. bool cycle)
  7217. {
  7218. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  7219. VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)transferBuffer;
  7220. if (
  7221. cycle &&
  7222. SDL_AtomicGet(&transferBufferContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0) {
  7223. VULKAN_INTERNAL_CycleActiveBuffer(
  7224. renderer,
  7225. transferBufferContainer);
  7226. }
  7227. Uint8 *bufferPointer =
  7228. transferBufferContainer->activeBufferHandle->vulkanBuffer->usedRegion->allocation->mapPointer +
  7229. transferBufferContainer->activeBufferHandle->vulkanBuffer->usedRegion->resourceOffset;
  7230. return bufferPointer;
  7231. }
  7232. static void VULKAN_UnmapTransferBuffer(
  7233. SDL_GPURenderer *driverData,
  7234. SDL_GPUTransferBuffer *transferBuffer)
  7235. {
  7236. // no-op because transfer buffers are persistently mapped
  7237. (void)driverData;
  7238. (void)transferBuffer;
  7239. }
  7240. static void VULKAN_BeginCopyPass(
  7241. SDL_GPUCommandBuffer *commandBuffer)
  7242. {
  7243. // no-op
  7244. (void)commandBuffer;
  7245. }
// Records a buffer->image copy from a (persistently mapped) transfer buffer
// into a texture region. Handles the destination barrier/cycle, issues the
// copy, restores the subresource to its default usage, and tracks both
// resources on the command buffer for lifetime management.
static void VULKAN_UploadToTexture(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUTextureTransferInfo *source,
    SDL_GPUTextureRegion *destination,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transferBuffer;
    VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)destination->texture;
    VulkanTextureSubresource *vulkanTextureSubresource;
    VkBufferImageCopy imageCopy;

    // Note that the transfer buffer does not need a barrier, as it is synced by the client

    // Transition the destination subresource to TRANSFER_DST (optionally
    // cycling to a fresh texture if the caller requested it).
    vulkanTextureSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        vulkanTextureContainer,
        destination->layer,
        destination->mipLevel,
        cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    imageCopy.imageExtent.width = destination->w;
    imageCopy.imageExtent.height = destination->h;
    imageCopy.imageExtent.depth = destination->d;
    imageCopy.imageOffset.x = destination->x;
    imageCopy.imageOffset.y = destination->y;
    imageCopy.imageOffset.z = destination->z;
    imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags;
    imageCopy.imageSubresource.baseArrayLayer = destination->layer;
    imageCopy.imageSubresource.layerCount = 1;
    imageCopy.imageSubresource.mipLevel = destination->mipLevel;
    // bufferRowLength/bufferImageHeight of 0 would mean tightly packed;
    // here the caller-provided pitch/height are forwarded directly.
    imageCopy.bufferOffset = source->offset;
    imageCopy.bufferRowLength = source->imagePitch;
    imageCopy.bufferImageHeight = source->imageHeight;

    renderer->vkCmdCopyBufferToImage(
        vulkanCommandBuffer->commandBuffer,
        transferBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
        vulkanTextureSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &imageCopy);

    // Return the subresource to its default usage/layout for later passes.
    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        vulkanTextureSubresource);

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBufferHandle->vulkanBuffer);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent);
}
// Records a buffer->buffer copy from a transfer buffer into a GPU buffer
// region. Prepares the destination for write (with optional cycling),
// copies, restores default usage, and tracks both buffers.
static void VULKAN_UploadToBuffer(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUTransferBufferLocation *source,
    SDL_GPUBufferRegion *destination,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)source->transferBuffer;
    VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)destination->buffer;
    VkBufferCopy bufferCopy;

    // Note that the transfer buffer does not need a barrier, as it is synced by the client

    VulkanBuffer *vulkanBuffer = VULKAN_INTERNAL_PrepareBufferForWrite(
        renderer,
        vulkanCommandBuffer,
        bufferContainer,
        cycle,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION);

    bufferCopy.srcOffset = source->offset;
    bufferCopy.dstOffset = destination->offset;
    bufferCopy.size = destination->size;

    renderer->vkCmdCopyBuffer(
        vulkanCommandBuffer->commandBuffer,
        transferBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
        vulkanBuffer->buffer,
        1,
        &bufferCopy);

    // Return the destination to its default usage for subsequent passes.
    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
        vulkanBuffer);

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBufferHandle->vulkanBuffer);
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, vulkanBuffer);
}
  7330. // Readback
// Records an image->buffer copy for readback: copies a texture region into
// a transfer buffer. The source subresource is transitioned to
// TRANSFER_SRC around the copy and then restored to its default usage.
static void VULKAN_DownloadFromTexture(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUTextureRegion *source,
    SDL_GPUTextureTransferInfo *destination)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanTextureContainer *textureContainer = (VulkanTextureContainer *)source->texture;
    VulkanTextureSubresource *vulkanTextureSubresource;
    VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transferBuffer;
    VkBufferImageCopy imageCopy;

    vulkanTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        textureContainer,
        source->layer,
        source->mipLevel);

    // Note that the transfer buffer does not need a barrier, as it is synced by the client

    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        vulkanTextureSubresource);

    imageCopy.imageExtent.width = source->w;
    imageCopy.imageExtent.height = source->h;
    imageCopy.imageExtent.depth = source->d;
    imageCopy.imageOffset.x = source->x;
    imageCopy.imageOffset.y = source->y;
    imageCopy.imageOffset.z = source->z;
    imageCopy.imageSubresource.aspectMask = vulkanTextureSubresource->parent->aspectFlags;
    imageCopy.imageSubresource.baseArrayLayer = source->layer;
    imageCopy.imageSubresource.layerCount = 1;
    imageCopy.imageSubresource.mipLevel = source->mipLevel;
    // Destination layout in the transfer buffer is caller-controlled.
    imageCopy.bufferOffset = destination->offset;
    imageCopy.bufferRowLength = destination->imagePitch;
    imageCopy.bufferImageHeight = destination->imageHeight;

    renderer->vkCmdCopyImageToBuffer(
        vulkanCommandBuffer->commandBuffer,
        vulkanTextureSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        transferBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
        1,
        &imageCopy);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        vulkanTextureSubresource);

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBufferHandle->vulkanBuffer);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, vulkanTextureSubresource->parent);
}
// Records a buffer->buffer copy for readback: copies a GPU buffer region
// into a transfer buffer. The source buffer is transitioned to copy-source
// usage around the copy and then restored.
static void VULKAN_DownloadFromBuffer(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUBufferRegion *source,
    SDL_GPUTransferBufferLocation *destination)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = vulkanCommandBuffer->renderer;
    VulkanBufferContainer *bufferContainer = (VulkanBufferContainer *)source->buffer;
    VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer *)destination->transferBuffer;
    VkBufferCopy bufferCopy;

    // Note that transfer buffer does not need a barrier, as it is synced by the client

    VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        bufferContainer->activeBufferHandle->vulkanBuffer);

    bufferCopy.srcOffset = source->offset;
    bufferCopy.dstOffset = destination->offset;
    bufferCopy.size = source->size;

    renderer->vkCmdCopyBuffer(
        vulkanCommandBuffer->commandBuffer,
        bufferContainer->activeBufferHandle->vulkanBuffer->buffer,
        transferBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
        1,
        &bufferCopy);

    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        bufferContainer->activeBufferHandle->vulkanBuffer);

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, transferBufferContainer->activeBufferHandle->vulkanBuffer);
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, bufferContainer->activeBufferHandle->vulkanBuffer);
}
// Records an image->image copy of a (w x h x d) region between two texture
// subresources. The destination may be cycled; both subresources are
// transitioned for the copy and restored to default usage afterward.
static void VULKAN_CopyTextureToTexture(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUTextureLocation *source,
    SDL_GPUTextureLocation *destination,
    Uint32 w,
    Uint32 h,
    Uint32 d,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VulkanTextureSubresource *srcSubresource;
    VulkanTextureSubresource *dstSubresource;
    VkImageCopy imageCopy;

    srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        (VulkanTextureContainer *)source->texture,
        source->layer,
        source->mipLevel);

    // NOTE: the destination must be prepared before the source barrier so
    // a cycle does not invalidate the source transition.
    dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        (VulkanTextureContainer *)destination->texture,
        destination->layer,
        destination->mipLevel,
        cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    imageCopy.srcOffset.x = source->x;
    imageCopy.srcOffset.y = source->y;
    imageCopy.srcOffset.z = source->z;
    imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
    imageCopy.srcSubresource.baseArrayLayer = source->layer;
    imageCopy.srcSubresource.layerCount = 1;
    imageCopy.srcSubresource.mipLevel = source->mipLevel;
    imageCopy.dstOffset.x = destination->x;
    imageCopy.dstOffset.y = destination->y;
    imageCopy.dstOffset.z = destination->z;
    imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
    imageCopy.dstSubresource.baseArrayLayer = destination->layer;
    imageCopy.dstSubresource.layerCount = 1;
    imageCopy.dstSubresource.mipLevel = destination->mipLevel;
    imageCopy.extent.width = w;
    imageCopy.extent.height = h;
    imageCopy.extent.depth = d;

    renderer->vkCmdCopyImage(
        vulkanCommandBuffer->commandBuffer,
        srcSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        dstSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &imageCopy);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        dstSubresource);

    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
}
// Records a GPU buffer->buffer copy of `size` bytes. The destination may
// be cycled; both buffers are barriered for the copy and restored to their
// default usage afterward.
static void VULKAN_CopyBufferToBuffer(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUBufferLocation *source,
    SDL_GPUBufferLocation *destination,
    Uint32 size,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VulkanBufferContainer *srcContainer = (VulkanBufferContainer *)source->buffer;
    VulkanBufferContainer *dstContainer = (VulkanBufferContainer *)destination->buffer;
    VkBufferCopy bufferCopy;

    // Prepare destination first (it may cycle to a fresh buffer).
    VulkanBuffer *dstBuffer = VULKAN_INTERNAL_PrepareBufferForWrite(
        renderer,
        vulkanCommandBuffer,
        dstContainer,
        cycle,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION);

    VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        srcContainer->activeBufferHandle->vulkanBuffer);

    bufferCopy.srcOffset = source->offset;
    bufferCopy.dstOffset = destination->offset;
    bufferCopy.size = size;

    renderer->vkCmdCopyBuffer(
        vulkanCommandBuffer->commandBuffer,
        srcContainer->activeBufferHandle->vulkanBuffer->buffer,
        dstBuffer->buffer,
        1,
        &bufferCopy);

    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
        srcContainer->activeBufferHandle->vulkanBuffer);

    VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
        dstBuffer);

    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, srcContainer->activeBufferHandle->vulkanBuffer);
    VULKAN_INTERNAL_TrackBuffer(vulkanCommandBuffer, dstBuffer);
}
  7527. static void VULKAN_GenerateMipmaps(
  7528. SDL_GPUCommandBuffer *commandBuffer,
  7529. SDL_GPUTexture *texture)
  7530. {
  7531. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  7532. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  7533. VulkanTexture *vulkanTexture = ((VulkanTextureContainer *)texture)->activeTextureHandle->vulkanTexture;
  7534. VulkanTextureSubresource *srcTextureSubresource;
  7535. VulkanTextureSubresource *dstTextureSubresource;
  7536. VkImageBlit blit;
  7537. // Blit each slice sequentially. Barriers, barriers everywhere!
  7538. for (Uint32 layerOrDepthIndex = 0; layerOrDepthIndex < vulkanTexture->layerCount; layerOrDepthIndex += 1)
  7539. for (Uint32 level = 1; level < vulkanTexture->levelCount; level += 1) {
  7540. Uint32 layer = vulkanTexture->type == SDL_GPU_TEXTURETYPE_3D ? 0 : layerOrDepthIndex;
  7541. Uint32 depth = vulkanTexture->type == SDL_GPU_TEXTURETYPE_3D ? layerOrDepthIndex : 0;
  7542. Uint32 srcSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  7543. level - 1,
  7544. layer,
  7545. vulkanTexture->levelCount);
  7546. Uint32 dstSubresourceIndex = VULKAN_INTERNAL_GetTextureSubresourceIndex(
  7547. level,
  7548. layer,
  7549. vulkanTexture->levelCount);
  7550. srcTextureSubresource = &vulkanTexture->subresources[srcSubresourceIndex];
  7551. dstTextureSubresource = &vulkanTexture->subresources[dstSubresourceIndex];
  7552. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  7553. renderer,
  7554. vulkanCommandBuffer,
  7555. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7556. srcTextureSubresource);
  7557. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  7558. renderer,
  7559. vulkanCommandBuffer,
  7560. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  7561. dstTextureSubresource);
  7562. blit.srcOffsets[0].x = 0;
  7563. blit.srcOffsets[0].y = 0;
  7564. blit.srcOffsets[0].z = depth;
  7565. blit.srcOffsets[1].x = vulkanTexture->dimensions.width >> (level - 1);
  7566. blit.srcOffsets[1].y = vulkanTexture->dimensions.height >> (level - 1);
  7567. blit.srcOffsets[1].z = depth + 1;
  7568. blit.dstOffsets[0].x = 0;
  7569. blit.dstOffsets[0].y = 0;
  7570. blit.dstOffsets[0].z = depth;
  7571. blit.dstOffsets[1].x = vulkanTexture->dimensions.width >> level;
  7572. blit.dstOffsets[1].y = vulkanTexture->dimensions.height >> level;
  7573. blit.dstOffsets[1].z = depth + 1;
  7574. blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  7575. blit.srcSubresource.baseArrayLayer = layer;
  7576. blit.srcSubresource.layerCount = 1;
  7577. blit.srcSubresource.mipLevel = level - 1;
  7578. blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  7579. blit.dstSubresource.baseArrayLayer = layer;
  7580. blit.dstSubresource.layerCount = 1;
  7581. blit.dstSubresource.mipLevel = level;
  7582. renderer->vkCmdBlitImage(
  7583. vulkanCommandBuffer->commandBuffer,
  7584. vulkanTexture->image,
  7585. VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
  7586. vulkanTexture->image,
  7587. VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
  7588. 1,
  7589. &blit,
  7590. VK_FILTER_LINEAR);
  7591. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7592. renderer,
  7593. vulkanCommandBuffer,
  7594. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  7595. srcTextureSubresource);
  7596. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  7597. renderer,
  7598. vulkanCommandBuffer,
  7599. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  7600. dstTextureSubresource);
  7601. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcTextureSubresource->parent);
  7602. VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstTextureSubresource->parent);
  7603. }
  7604. }
  7605. static void VULKAN_EndCopyPass(
  7606. SDL_GPUCommandBuffer *commandBuffer)
  7607. {
  7608. // no-op
  7609. (void)commandBuffer;
  7610. }
// Records an image blit (with optional scaling, filtering, and horizontal/
// vertical flip) between two texture regions. For 3D textures,
// layerOrDepthPlane selects a depth slice; otherwise it selects an array
// layer. Flips are implemented by swapping the source offset corners.
static void VULKAN_Blit(
    SDL_GPUCommandBuffer *commandBuffer,
    SDL_GPUBlitRegion *source,
    SDL_GPUBlitRegion *destination,
    SDL_FlipMode flipMode,
    SDL_GPUFilter filterMode,
    bool cycle)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    TextureCommonHeader *srcHeader = (TextureCommonHeader *)source->texture;
    TextureCommonHeader *dstHeader = (TextureCommonHeader *)destination->texture;
    VkImageBlit region;
    // Decode layerOrDepthPlane into (layer, depth) per texture type.
    Uint32 srcLayer = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : source->layerOrDepthPlane;
    Uint32 srcDepth = srcHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? source->layerOrDepthPlane : 0;
    Uint32 dstLayer = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? 0 : destination->layerOrDepthPlane;
    Uint32 dstDepth = dstHeader->info.type == SDL_GPU_TEXTURETYPE_3D ? destination->layerOrDepthPlane : 0;
    int32_t swap;

    VulkanTextureSubresource *srcSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
        (VulkanTextureContainer *)source->texture,
        srcLayer,
        source->mipLevel);

    VulkanTextureSubresource *dstSubresource = VULKAN_INTERNAL_PrepareTextureSubresourceForWrite(
        renderer,
        vulkanCommandBuffer,
        (VulkanTextureContainer *)destination->texture,
        dstLayer,
        destination->mipLevel,
        cycle,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION);

    VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    region.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
    region.srcSubresource.baseArrayLayer = srcSubresource->layer;
    region.srcSubresource.layerCount = 1;
    region.srcSubresource.mipLevel = srcSubresource->level;
    region.srcOffsets[0].x = source->x;
    region.srcOffsets[0].y = source->y;
    region.srcOffsets[0].z = srcDepth;
    region.srcOffsets[1].x = source->x + source->w;
    region.srcOffsets[1].y = source->y + source->h;
    region.srcOffsets[1].z = srcDepth + 1;

    // vkCmdBlitImage performs a flip when the source corners are reversed.
    if (flipMode & SDL_FLIP_HORIZONTAL) {
        // flip the x positions
        swap = region.srcOffsets[0].x;
        region.srcOffsets[0].x = region.srcOffsets[1].x;
        region.srcOffsets[1].x = swap;
    }

    if (flipMode & SDL_FLIP_VERTICAL) {
        // flip the y positions
        swap = region.srcOffsets[0].y;
        region.srcOffsets[0].y = region.srcOffsets[1].y;
        region.srcOffsets[1].y = swap;
    }

    region.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
    region.dstSubresource.baseArrayLayer = dstSubresource->layer;
    region.dstSubresource.layerCount = 1;
    region.dstSubresource.mipLevel = dstSubresource->level;
    region.dstOffsets[0].x = destination->x;
    region.dstOffsets[0].y = destination->y;
    region.dstOffsets[0].z = dstDepth;
    region.dstOffsets[1].x = destination->x + destination->w;
    region.dstOffsets[1].y = destination->y + destination->h;
    region.dstOffsets[1].z = dstDepth + 1;

    renderer->vkCmdBlitImage(
        vulkanCommandBuffer->commandBuffer,
        srcSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
        dstSubresource->parent->image,
        VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
        1,
        &region,
        SDLToVK_Filter[filterMode]);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
        srcSubresource);

    VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
        renderer,
        vulkanCommandBuffer,
        VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
        dstSubresource);

    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, srcSubresource->parent);
    VULKAN_INTERNAL_TrackTexture(vulkanCommandBuffer, dstSubresource->parent);
}
  7700. static void VULKAN_INTERNAL_AllocateCommandBuffers(
  7701. VulkanRenderer *renderer,
  7702. VulkanCommandPool *vulkanCommandPool,
  7703. Uint32 allocateCount)
  7704. {
  7705. VkCommandBufferAllocateInfo allocateInfo;
  7706. VkResult vulkanResult;
  7707. Uint32 i;
  7708. VkCommandBuffer *commandBuffers = SDL_stack_alloc(VkCommandBuffer, allocateCount);
  7709. VulkanCommandBuffer *commandBuffer;
  7710. vulkanCommandPool->inactiveCommandBufferCapacity += allocateCount;
  7711. vulkanCommandPool->inactiveCommandBuffers = SDL_realloc(
  7712. vulkanCommandPool->inactiveCommandBuffers,
  7713. sizeof(VulkanCommandBuffer *) *
  7714. vulkanCommandPool->inactiveCommandBufferCapacity);
  7715. allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  7716. allocateInfo.pNext = NULL;
  7717. allocateInfo.commandPool = vulkanCommandPool->commandPool;
  7718. allocateInfo.commandBufferCount = allocateCount;
  7719. allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  7720. vulkanResult = renderer->vkAllocateCommandBuffers(
  7721. renderer->logicalDevice,
  7722. &allocateInfo,
  7723. commandBuffers);
  7724. if (vulkanResult != VK_SUCCESS) {
  7725. LogVulkanResultAsError("vkAllocateCommandBuffers", vulkanResult);
  7726. SDL_stack_free(commandBuffers);
  7727. return;
  7728. }
  7729. for (i = 0; i < allocateCount; i += 1) {
  7730. commandBuffer = SDL_malloc(sizeof(VulkanCommandBuffer));
  7731. commandBuffer->renderer = renderer;
  7732. commandBuffer->commandPool = vulkanCommandPool;
  7733. commandBuffer->commandBuffer = commandBuffers[i];
  7734. commandBuffer->inFlightFence = VK_NULL_HANDLE;
  7735. // Presentation tracking
  7736. commandBuffer->presentDataCapacity = 1;
  7737. commandBuffer->presentDataCount = 0;
  7738. commandBuffer->presentDatas = SDL_malloc(
  7739. commandBuffer->presentDataCapacity * sizeof(VulkanPresentData));
  7740. commandBuffer->waitSemaphoreCapacity = 1;
  7741. commandBuffer->waitSemaphoreCount = 0;
  7742. commandBuffer->waitSemaphores = SDL_malloc(
  7743. commandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));
  7744. commandBuffer->signalSemaphoreCapacity = 1;
  7745. commandBuffer->signalSemaphoreCount = 0;
  7746. commandBuffer->signalSemaphores = SDL_malloc(
  7747. commandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));
  7748. // Descriptor set tracking
  7749. commandBuffer->boundDescriptorSetDataCapacity = 16;
  7750. commandBuffer->boundDescriptorSetDataCount = 0;
  7751. commandBuffer->boundDescriptorSetDatas = SDL_malloc(
  7752. commandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData));
  7753. // Resource bind tracking
  7754. commandBuffer->needNewVertexResourceDescriptorSet = true;
  7755. commandBuffer->needNewVertexUniformDescriptorSet = true;
  7756. commandBuffer->needNewVertexUniformOffsets = true;
  7757. commandBuffer->needNewFragmentResourceDescriptorSet = true;
  7758. commandBuffer->needNewFragmentUniformDescriptorSet = true;
  7759. commandBuffer->needNewFragmentUniformOffsets = true;
  7760. commandBuffer->needNewComputeWriteOnlyDescriptorSet = true;
  7761. commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  7762. commandBuffer->needNewComputeUniformDescriptorSet = true;
  7763. commandBuffer->needNewComputeUniformOffsets = true;
  7764. commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
  7765. commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
  7766. commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
  7767. commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;
  7768. commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
  7769. commandBuffer->computeWriteOnlyDescriptorSet = VK_NULL_HANDLE;
  7770. commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
  7771. // Resource tracking
  7772. commandBuffer->usedBufferCapacity = 4;
  7773. commandBuffer->usedBufferCount = 0;
  7774. commandBuffer->usedBuffers = SDL_malloc(
  7775. commandBuffer->usedBufferCapacity * sizeof(VulkanBuffer *));
  7776. commandBuffer->usedTextureCapacity = 4;
  7777. commandBuffer->usedTextureCount = 0;
  7778. commandBuffer->usedTextures = SDL_malloc(
  7779. commandBuffer->usedTextureCapacity * sizeof(VulkanTexture *));
  7780. commandBuffer->usedSamplerCapacity = 4;
  7781. commandBuffer->usedSamplerCount = 0;
  7782. commandBuffer->usedSamplers = SDL_malloc(
  7783. commandBuffer->usedSamplerCapacity * sizeof(VulkanSampler *));
  7784. commandBuffer->usedGraphicsPipelineCapacity = 4;
  7785. commandBuffer->usedGraphicsPipelineCount = 0;
  7786. commandBuffer->usedGraphicsPipelines = SDL_malloc(
  7787. commandBuffer->usedGraphicsPipelineCapacity * sizeof(VulkanGraphicsPipeline *));
  7788. commandBuffer->usedComputePipelineCapacity = 4;
  7789. commandBuffer->usedComputePipelineCount = 0;
  7790. commandBuffer->usedComputePipelines = SDL_malloc(
  7791. commandBuffer->usedComputePipelineCapacity * sizeof(VulkanComputePipeline *));
  7792. commandBuffer->usedFramebufferCapacity = 4;
  7793. commandBuffer->usedFramebufferCount = 0;
  7794. commandBuffer->usedFramebuffers = SDL_malloc(
  7795. commandBuffer->usedFramebufferCapacity * sizeof(VulkanFramebuffer *));
  7796. commandBuffer->usedUniformBufferCapacity = 4;
  7797. commandBuffer->usedUniformBufferCount = 0;
  7798. commandBuffer->usedUniformBuffers = SDL_malloc(
  7799. commandBuffer->usedUniformBufferCapacity * sizeof(VulkanUniformBuffer *));
  7800. // Pool it!
  7801. vulkanCommandPool->inactiveCommandBuffers[vulkanCommandPool->inactiveCommandBufferCount] = commandBuffer;
  7802. vulkanCommandPool->inactiveCommandBufferCount += 1;
  7803. }
  7804. SDL_stack_free(commandBuffers);
  7805. }
  7806. static VulkanCommandPool *VULKAN_INTERNAL_FetchCommandPool(
  7807. VulkanRenderer *renderer,
  7808. SDL_ThreadID threadID)
  7809. {
  7810. VulkanCommandPool *vulkanCommandPool = NULL;
  7811. VkCommandPoolCreateInfo commandPoolCreateInfo;
  7812. VkResult vulkanResult;
  7813. CommandPoolHashTableKey key;
  7814. key.threadID = threadID;
  7815. bool result = SDL_FindInHashTable(
  7816. renderer->commandPoolHashTable,
  7817. (const void *)&key,
  7818. (const void **)&vulkanCommandPool);
  7819. if (result) {
  7820. return vulkanCommandPool;
  7821. }
  7822. vulkanCommandPool = (VulkanCommandPool *)SDL_malloc(sizeof(VulkanCommandPool));
  7823. commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  7824. commandPoolCreateInfo.pNext = NULL;
  7825. commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  7826. commandPoolCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
  7827. vulkanResult = renderer->vkCreateCommandPool(
  7828. renderer->logicalDevice,
  7829. &commandPoolCreateInfo,
  7830. NULL,
  7831. &vulkanCommandPool->commandPool);
  7832. if (vulkanResult != VK_SUCCESS) {
  7833. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create command pool!");
  7834. LogVulkanResultAsError("vkCreateCommandPool", vulkanResult);
  7835. return NULL;
  7836. }
  7837. vulkanCommandPool->threadID = threadID;
  7838. vulkanCommandPool->inactiveCommandBufferCapacity = 0;
  7839. vulkanCommandPool->inactiveCommandBufferCount = 0;
  7840. vulkanCommandPool->inactiveCommandBuffers = NULL;
  7841. VULKAN_INTERNAL_AllocateCommandBuffers(
  7842. renderer,
  7843. vulkanCommandPool,
  7844. 2);
  7845. CommandPoolHashTableKey *allocedKey = SDL_malloc(sizeof(CommandPoolHashTableKey));
  7846. allocedKey->threadID = threadID;
  7847. SDL_InsertIntoHashTable(
  7848. renderer->commandPoolHashTable,
  7849. (const void *)allocedKey,
  7850. (const void *)vulkanCommandPool);
  7851. return vulkanCommandPool;
  7852. }
  7853. static VulkanCommandBuffer *VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(
  7854. VulkanRenderer *renderer,
  7855. SDL_ThreadID threadID)
  7856. {
  7857. VulkanCommandPool *commandPool =
  7858. VULKAN_INTERNAL_FetchCommandPool(renderer, threadID);
  7859. VulkanCommandBuffer *commandBuffer;
  7860. if (commandPool == NULL) {
  7861. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to fetch command pool!");
  7862. return NULL;
  7863. }
  7864. if (commandPool->inactiveCommandBufferCount == 0) {
  7865. VULKAN_INTERNAL_AllocateCommandBuffers(
  7866. renderer,
  7867. commandPool,
  7868. commandPool->inactiveCommandBufferCapacity);
  7869. }
  7870. commandBuffer = commandPool->inactiveCommandBuffers[commandPool->inactiveCommandBufferCount - 1];
  7871. commandPool->inactiveCommandBufferCount -= 1;
  7872. return commandBuffer;
  7873. }
  7874. static SDL_GPUCommandBuffer *VULKAN_AcquireCommandBuffer(
  7875. SDL_GPURenderer *driverData)
  7876. {
  7877. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  7878. VkResult result;
  7879. Uint32 i;
  7880. SDL_ThreadID threadID = SDL_GetCurrentThreadID();
  7881. SDL_LockMutex(renderer->acquireCommandBufferLock);
  7882. VulkanCommandBuffer *commandBuffer =
  7883. VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(renderer, threadID);
  7884. SDL_UnlockMutex(renderer->acquireCommandBufferLock);
  7885. if (commandBuffer == NULL) {
  7886. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to acquire command buffer!");
  7887. return NULL;
  7888. }
  7889. // Reset state
  7890. commandBuffer->currentComputePipeline = NULL;
  7891. commandBuffer->currentGraphicsPipeline = NULL;
  7892. for (i = 0; i < MAX_COLOR_TARGET_BINDINGS; i += 1) {
  7893. commandBuffer->colorAttachmentSubresources[i] = NULL;
  7894. }
  7895. for (i = 0; i < MAX_UNIFORM_BUFFERS_PER_STAGE; i += 1) {
  7896. commandBuffer->vertexUniformBuffers[i] = NULL;
  7897. commandBuffer->fragmentUniformBuffers[i] = NULL;
  7898. commandBuffer->computeUniformBuffers[i] = NULL;
  7899. }
  7900. commandBuffer->depthStencilAttachmentSubresource = NULL;
  7901. commandBuffer->needNewVertexResourceDescriptorSet = true;
  7902. commandBuffer->needNewVertexUniformDescriptorSet = true;
  7903. commandBuffer->needNewVertexUniformOffsets = true;
  7904. commandBuffer->needNewFragmentResourceDescriptorSet = true;
  7905. commandBuffer->needNewFragmentUniformDescriptorSet = true;
  7906. commandBuffer->needNewFragmentUniformOffsets = true;
  7907. commandBuffer->needNewComputeReadOnlyDescriptorSet = true;
  7908. commandBuffer->needNewComputeUniformDescriptorSet = true;
  7909. commandBuffer->needNewComputeUniformOffsets = true;
  7910. commandBuffer->vertexResourceDescriptorSet = VK_NULL_HANDLE;
  7911. commandBuffer->vertexUniformDescriptorSet = VK_NULL_HANDLE;
  7912. commandBuffer->fragmentResourceDescriptorSet = VK_NULL_HANDLE;
  7913. commandBuffer->fragmentUniformDescriptorSet = VK_NULL_HANDLE;
  7914. commandBuffer->computeReadOnlyDescriptorSet = VK_NULL_HANDLE;
  7915. commandBuffer->computeWriteOnlyDescriptorSet = VK_NULL_HANDLE;
  7916. commandBuffer->computeUniformDescriptorSet = VK_NULL_HANDLE;
  7917. SDL_zeroa(commandBuffer->vertexSamplerTextures);
  7918. SDL_zeroa(commandBuffer->vertexSamplers);
  7919. SDL_zeroa(commandBuffer->vertexStorageTextures);
  7920. SDL_zeroa(commandBuffer->vertexStorageBuffers);
  7921. SDL_zeroa(commandBuffer->fragmentSamplerTextures);
  7922. SDL_zeroa(commandBuffer->fragmentSamplers);
  7923. SDL_zeroa(commandBuffer->fragmentStorageTextures);
  7924. SDL_zeroa(commandBuffer->fragmentStorageBuffers);
  7925. SDL_zeroa(commandBuffer->writeOnlyComputeStorageTextureSubresources);
  7926. commandBuffer->writeOnlyComputeStorageTextureSubresourceCount = 0;
  7927. SDL_zeroa(commandBuffer->writeOnlyComputeStorageBuffers);
  7928. SDL_zeroa(commandBuffer->readOnlyComputeStorageTextures);
  7929. SDL_zeroa(commandBuffer->readOnlyComputeStorageBuffers);
  7930. commandBuffer->autoReleaseFence = 1;
  7931. commandBuffer->isDefrag = 0;
  7932. /* Reset the command buffer here to avoid resets being called
  7933. * from a separate thread than where the command buffer was acquired
  7934. */
  7935. result = renderer->vkResetCommandBuffer(
  7936. commandBuffer->commandBuffer,
  7937. VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT);
  7938. if (result != VK_SUCCESS) {
  7939. LogVulkanResultAsError("vkResetCommandBuffer", result);
  7940. }
  7941. VULKAN_INTERNAL_BeginCommandBuffer(renderer, commandBuffer);
  7942. return (SDL_GPUCommandBuffer *)commandBuffer;
  7943. }
  7944. static bool VULKAN_QueryFence(
  7945. SDL_GPURenderer *driverData,
  7946. SDL_GPUFence *fence)
  7947. {
  7948. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  7949. VkResult result;
  7950. result = renderer->vkGetFenceStatus(
  7951. renderer->logicalDevice,
  7952. ((VulkanFenceHandle *)fence)->fence);
  7953. if (result == VK_SUCCESS) {
  7954. return 1;
  7955. } else if (result == VK_NOT_READY) {
  7956. return 0;
  7957. } else {
  7958. LogVulkanResultAsError("vkGetFenceStatus", result);
  7959. return 0;
  7960. }
  7961. }
/* Pushes a fence handle back onto the renderer's reusable fence pool.
 * The fence is NOT reset here; VULKAN_INTERNAL_AcquireFenceFromPool resets
 * it when it is handed out again. Thread-safe via fencePool.lock. */
static void VULKAN_INTERNAL_ReturnFenceToPool(
    VulkanRenderer *renderer,
    VulkanFenceHandle *fenceHandle)
{
    SDL_LockMutex(renderer->fencePool.lock);
    // Grow the available-fence array (doubling) if it is full.
    EXPAND_ARRAY_IF_NEEDED(
        renderer->fencePool.availableFences,
        VulkanFenceHandle *,
        renderer->fencePool.availableFenceCount + 1,
        renderer->fencePool.availableFenceCapacity,
        renderer->fencePool.availableFenceCapacity * 2);
    renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount] = fenceHandle;
    renderer->fencePool.availableFenceCount += 1;
    SDL_UnlockMutex(renderer->fencePool.lock);
}
  7977. static void VULKAN_ReleaseFence(
  7978. SDL_GPURenderer *driverData,
  7979. SDL_GPUFence *fence)
  7980. {
  7981. VulkanFenceHandle *handle = (VulkanFenceHandle *)fence;
  7982. if (SDL_AtomicDecRef(&handle->referenceCount)) {
  7983. VULKAN_INTERNAL_ReturnFenceToPool((VulkanRenderer *)driverData, handle);
  7984. }
  7985. }
  7986. static WindowData *VULKAN_INTERNAL_FetchWindowData(
  7987. SDL_Window *window)
  7988. {
  7989. SDL_PropertiesID properties = SDL_GetWindowProperties(window);
  7990. return (WindowData *)SDL_GetPointerProperty(properties, WINDOW_PROPERTY_DATA, NULL);
  7991. }
  7992. static SDL_bool VULKAN_INTERNAL_OnWindowResize(void *userdata, SDL_Event *e)
  7993. {
  7994. SDL_Window *w = (SDL_Window *)userdata;
  7995. WindowData *data;
  7996. if (e->type == SDL_EVENT_WINDOW_PIXEL_SIZE_CHANGED) {
  7997. data = VULKAN_INTERNAL_FetchWindowData(w);
  7998. data->needsSwapchainRecreate = true;
  7999. }
  8000. return true;
  8001. }
  8002. static bool VULKAN_SupportsSwapchainComposition(
  8003. SDL_GPURenderer *driverData,
  8004. SDL_Window *window,
  8005. SDL_GPUSwapchainComposition swapchainComposition)
  8006. {
  8007. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8008. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8009. VkSurfaceKHR surface;
  8010. SwapchainSupportDetails supportDetails;
  8011. bool result = false;
  8012. if (windowData == NULL || windowData->swapchainData == NULL) {
  8013. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Must claim window before querying swapchain composition support!");
  8014. return false;
  8015. }
  8016. surface = windowData->swapchainData->surface;
  8017. if (VULKAN_INTERNAL_QuerySwapchainSupport(
  8018. renderer,
  8019. renderer->physicalDevice,
  8020. surface,
  8021. &supportDetails)) {
  8022. result = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  8023. SwapchainCompositionToFormat[swapchainComposition],
  8024. SwapchainCompositionToColorSpace[swapchainComposition],
  8025. supportDetails.formats,
  8026. supportDetails.formatsLength);
  8027. if (!result) {
  8028. // Let's try again with the fallback format...
  8029. result = VULKAN_INTERNAL_VerifySwapSurfaceFormat(
  8030. SwapchainCompositionToFallbackFormat[swapchainComposition],
  8031. SwapchainCompositionToColorSpace[swapchainComposition],
  8032. supportDetails.formats,
  8033. supportDetails.formatsLength);
  8034. }
  8035. SDL_free(supportDetails.formats);
  8036. SDL_free(supportDetails.presentModes);
  8037. }
  8038. return result;
  8039. }
  8040. static bool VULKAN_SupportsPresentMode(
  8041. SDL_GPURenderer *driverData,
  8042. SDL_Window *window,
  8043. SDL_GPUPresentMode presentMode)
  8044. {
  8045. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8046. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8047. VkSurfaceKHR surface;
  8048. SwapchainSupportDetails supportDetails;
  8049. bool result = false;
  8050. if (windowData == NULL || windowData->swapchainData == NULL) {
  8051. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Must claim window before querying present mode support!");
  8052. return false;
  8053. }
  8054. surface = windowData->swapchainData->surface;
  8055. if (VULKAN_INTERNAL_QuerySwapchainSupport(
  8056. renderer,
  8057. renderer->physicalDevice,
  8058. surface,
  8059. &supportDetails)) {
  8060. result = VULKAN_INTERNAL_VerifySwapPresentMode(
  8061. SDLToVK_PresentMode[presentMode],
  8062. supportDetails.presentModes,
  8063. supportDetails.presentModesLength);
  8064. SDL_free(supportDetails.formats);
  8065. SDL_free(supportDetails.presentModes);
  8066. }
  8067. return result;
  8068. }
  8069. static bool VULKAN_ClaimWindow(
  8070. SDL_GPURenderer *driverData,
  8071. SDL_Window *window)
  8072. {
  8073. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8074. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8075. if (windowData == NULL) {
  8076. windowData = SDL_malloc(sizeof(WindowData));
  8077. windowData->window = window;
  8078. windowData->presentMode = SDL_GPU_PRESENTMODE_VSYNC;
  8079. windowData->swapchainComposition = SDL_GPU_SWAPCHAINCOMPOSITION_SDR;
  8080. if (VULKAN_INTERNAL_CreateSwapchain(renderer, windowData)) {
  8081. SDL_SetPointerProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA, windowData);
  8082. if (renderer->claimedWindowCount >= renderer->claimedWindowCapacity) {
  8083. renderer->claimedWindowCapacity *= 2;
  8084. renderer->claimedWindows = SDL_realloc(
  8085. renderer->claimedWindows,
  8086. renderer->claimedWindowCapacity * sizeof(WindowData *));
  8087. }
  8088. renderer->claimedWindows[renderer->claimedWindowCount] = windowData;
  8089. renderer->claimedWindowCount += 1;
  8090. SDL_AddEventWatch(VULKAN_INTERNAL_OnWindowResize, window);
  8091. return 1;
  8092. } else {
  8093. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Could not create swapchain, failed to claim window!");
  8094. SDL_free(windowData);
  8095. return 0;
  8096. }
  8097. } else {
  8098. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Window already claimed!");
  8099. return 0;
  8100. }
  8101. }
  8102. static void VULKAN_ReleaseWindow(
  8103. SDL_GPURenderer *driverData,
  8104. SDL_Window *window)
  8105. {
  8106. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8107. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8108. Uint32 i;
  8109. if (windowData == NULL) {
  8110. return;
  8111. }
  8112. if (windowData->swapchainData != NULL) {
  8113. VULKAN_Wait(driverData);
  8114. for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
  8115. if (windowData->swapchainData->inFlightFences[i] != NULL) {
  8116. VULKAN_ReleaseFence(
  8117. driverData,
  8118. (SDL_GPUFence *)windowData->swapchainData->inFlightFences[i]);
  8119. }
  8120. }
  8121. VULKAN_INTERNAL_DestroySwapchain(
  8122. (VulkanRenderer *)driverData,
  8123. windowData);
  8124. }
  8125. for (i = 0; i < renderer->claimedWindowCount; i += 1) {
  8126. if (renderer->claimedWindows[i]->window == window) {
  8127. renderer->claimedWindows[i] = renderer->claimedWindows[renderer->claimedWindowCount - 1];
  8128. renderer->claimedWindowCount -= 1;
  8129. break;
  8130. }
  8131. }
  8132. SDL_free(windowData);
  8133. SDL_ClearProperty(SDL_GetWindowProperties(window), WINDOW_PROPERTY_DATA);
  8134. SDL_DelEventWatch(VULKAN_INTERNAL_OnWindowResize, window);
  8135. }
  8136. static bool VULKAN_INTERNAL_RecreateSwapchain(
  8137. VulkanRenderer *renderer,
  8138. WindowData *windowData)
  8139. {
  8140. Uint32 i;
  8141. if (windowData->swapchainData != NULL) {
  8142. VULKAN_Wait((SDL_GPURenderer *)renderer);
  8143. for (i = 0; i < MAX_FRAMES_IN_FLIGHT; i += 1) {
  8144. if (windowData->swapchainData->inFlightFences[i] != NULL) {
  8145. VULKAN_ReleaseFence(
  8146. (SDL_GPURenderer *)renderer,
  8147. (SDL_GPUFence *)windowData->swapchainData->inFlightFences[i]);
  8148. }
  8149. }
  8150. }
  8151. VULKAN_INTERNAL_DestroySwapchain(renderer, windowData);
  8152. return VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
  8153. }
  8154. static SDL_GPUTexture *VULKAN_AcquireSwapchainTexture(
  8155. SDL_GPUCommandBuffer *commandBuffer,
  8156. SDL_Window *window,
  8157. Uint32 *pWidth,
  8158. Uint32 *pHeight)
  8159. {
  8160. VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  8161. VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
  8162. Uint32 swapchainImageIndex;
  8163. WindowData *windowData;
  8164. VulkanSwapchainData *swapchainData;
  8165. VkResult acquireResult = VK_SUCCESS;
  8166. VulkanTextureContainer *swapchainTextureContainer = NULL;
  8167. VulkanPresentData *presentData;
  8168. windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8169. if (windowData == NULL) {
  8170. return NULL;
  8171. }
  8172. swapchainData = windowData->swapchainData;
  8173. // Window is claimed but swapchain is invalid!
  8174. if (swapchainData == NULL) {
  8175. if (SDL_GetWindowFlags(window) & SDL_WINDOW_MINIMIZED) {
  8176. // Window is minimized, don't bother
  8177. return NULL;
  8178. }
  8179. // Let's try to recreate
  8180. VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
  8181. swapchainData = windowData->swapchainData;
  8182. if (swapchainData == NULL) {
  8183. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Failed to recreate swapchain!");
  8184. return NULL;
  8185. }
  8186. }
  8187. if (swapchainData->inFlightFences[swapchainData->frameCounter] != NULL) {
  8188. if (swapchainData->presentMode == VK_PRESENT_MODE_FIFO_KHR) {
  8189. // In VSYNC mode, block until the least recent presented frame is done
  8190. VULKAN_WaitForFences(
  8191. (SDL_GPURenderer *)renderer,
  8192. true,
  8193. (SDL_GPUFence **)&swapchainData->inFlightFences[swapchainData->frameCounter],
  8194. 1);
  8195. } else {
  8196. if (!VULKAN_QueryFence(
  8197. (SDL_GPURenderer *)renderer,
  8198. (SDL_GPUFence *)swapchainData->inFlightFences[swapchainData->frameCounter])) {
  8199. /*
  8200. * In MAILBOX or IMMEDIATE mode, if the least recent fence is not signaled,
  8201. * return NULL to indicate that rendering should be skipped
  8202. */
  8203. return NULL;
  8204. }
  8205. }
  8206. VULKAN_ReleaseFence(
  8207. (SDL_GPURenderer *)renderer,
  8208. (SDL_GPUFence *)swapchainData->inFlightFences[swapchainData->frameCounter]);
  8209. swapchainData->inFlightFences[swapchainData->frameCounter] = NULL;
  8210. }
  8211. // If window data marked as needing swapchain recreate, try to recreate
  8212. if (windowData->needsSwapchainRecreate) {
  8213. VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
  8214. swapchainData = windowData->swapchainData;
  8215. if (swapchainData == NULL) {
  8216. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Failed to recreate swapchain!");
  8217. return NULL;
  8218. }
  8219. }
  8220. // Finally, try to acquire!
  8221. acquireResult = renderer->vkAcquireNextImageKHR(
  8222. renderer->logicalDevice,
  8223. swapchainData->swapchain,
  8224. UINT64_MAX,
  8225. swapchainData->imageAvailableSemaphore[swapchainData->frameCounter],
  8226. VK_NULL_HANDLE,
  8227. &swapchainImageIndex);
  8228. // Acquisition is invalid, let's try to recreate
  8229. if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR) {
  8230. VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
  8231. swapchainData = windowData->swapchainData;
  8232. if (swapchainData == NULL) {
  8233. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Failed to recreate swapchain!");
  8234. return NULL;
  8235. }
  8236. acquireResult = renderer->vkAcquireNextImageKHR(
  8237. renderer->logicalDevice,
  8238. swapchainData->swapchain,
  8239. UINT64_MAX,
  8240. swapchainData->imageAvailableSemaphore[swapchainData->frameCounter],
  8241. VK_NULL_HANDLE,
  8242. &swapchainImageIndex);
  8243. if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR) {
  8244. SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Failed to acquire swapchain texture!");
  8245. return NULL;
  8246. }
  8247. }
  8248. swapchainTextureContainer = &swapchainData->textureContainers[swapchainImageIndex];
  8249. // We need a special execution dependency with pWaitDstStageMask or image transition can start before acquire finishes
  8250. VkImageMemoryBarrier imageBarrier;
  8251. imageBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
  8252. imageBarrier.pNext = NULL;
  8253. imageBarrier.srcAccessMask = 0;
  8254. imageBarrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
  8255. imageBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  8256. imageBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  8257. imageBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  8258. imageBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
  8259. imageBarrier.image = swapchainTextureContainer->activeTextureHandle->vulkanTexture->image;
  8260. imageBarrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
  8261. imageBarrier.subresourceRange.baseMipLevel = 0;
  8262. imageBarrier.subresourceRange.levelCount = 1;
  8263. imageBarrier.subresourceRange.baseArrayLayer = 0;
  8264. imageBarrier.subresourceRange.layerCount = 1;
  8265. renderer->vkCmdPipelineBarrier(
  8266. vulkanCommandBuffer->commandBuffer,
  8267. VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
  8268. VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
  8269. 0,
  8270. 0,
  8271. NULL,
  8272. 0,
  8273. NULL,
  8274. 1,
  8275. &imageBarrier);
  8276. // Set up present struct
  8277. if (vulkanCommandBuffer->presentDataCount == vulkanCommandBuffer->presentDataCapacity) {
  8278. vulkanCommandBuffer->presentDataCapacity += 1;
  8279. vulkanCommandBuffer->presentDatas = SDL_realloc(
  8280. vulkanCommandBuffer->presentDatas,
  8281. vulkanCommandBuffer->presentDataCapacity * sizeof(VulkanPresentData));
  8282. }
  8283. presentData = &vulkanCommandBuffer->presentDatas[vulkanCommandBuffer->presentDataCount];
  8284. vulkanCommandBuffer->presentDataCount += 1;
  8285. presentData->windowData = windowData;
  8286. presentData->swapchainImageIndex = swapchainImageIndex;
  8287. // Set up present semaphores
  8288. if (vulkanCommandBuffer->waitSemaphoreCount == vulkanCommandBuffer->waitSemaphoreCapacity) {
  8289. vulkanCommandBuffer->waitSemaphoreCapacity += 1;
  8290. vulkanCommandBuffer->waitSemaphores = SDL_realloc(
  8291. vulkanCommandBuffer->waitSemaphores,
  8292. vulkanCommandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore));
  8293. }
  8294. vulkanCommandBuffer->waitSemaphores[vulkanCommandBuffer->waitSemaphoreCount] =
  8295. swapchainData->imageAvailableSemaphore[swapchainData->frameCounter];
  8296. vulkanCommandBuffer->waitSemaphoreCount += 1;
  8297. if (vulkanCommandBuffer->signalSemaphoreCount == vulkanCommandBuffer->signalSemaphoreCapacity) {
  8298. vulkanCommandBuffer->signalSemaphoreCapacity += 1;
  8299. vulkanCommandBuffer->signalSemaphores = SDL_realloc(
  8300. vulkanCommandBuffer->signalSemaphores,
  8301. vulkanCommandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore));
  8302. }
  8303. vulkanCommandBuffer->signalSemaphores[vulkanCommandBuffer->signalSemaphoreCount] =
  8304. swapchainData->renderFinishedSemaphore[swapchainData->frameCounter];
  8305. vulkanCommandBuffer->signalSemaphoreCount += 1;
  8306. *pWidth = swapchainData->textureContainers[swapchainData->frameCounter].header.info.width;
  8307. *pHeight = swapchainData->textureContainers[swapchainData->frameCounter].header.info.height;
  8308. return (SDL_GPUTexture *)swapchainTextureContainer;
  8309. }
  8310. static SDL_GPUTextureFormat VULKAN_GetSwapchainTextureFormat(
  8311. SDL_GPURenderer *driverData,
  8312. SDL_Window *window)
  8313. {
  8314. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8315. if (windowData == NULL) {
  8316. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Cannot get swapchain format, window has not been claimed!");
  8317. return 0;
  8318. }
  8319. if (windowData->swapchainData == NULL) {
  8320. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Cannot get swapchain format, swapchain is currently invalid!");
  8321. return 0;
  8322. }
  8323. return SwapchainCompositionToSDLFormat(
  8324. windowData->swapchainComposition,
  8325. windowData->swapchainData->usingFallbackFormat);
  8326. }
  8327. static bool VULKAN_SetSwapchainParameters(
  8328. SDL_GPURenderer *driverData,
  8329. SDL_Window *window,
  8330. SDL_GPUSwapchainComposition swapchainComposition,
  8331. SDL_GPUPresentMode presentMode)
  8332. {
  8333. WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(window);
  8334. if (windowData == NULL) {
  8335. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Cannot set swapchain parameters on unclaimed window!");
  8336. return false;
  8337. }
  8338. if (!VULKAN_SupportsSwapchainComposition(driverData, window, swapchainComposition)) {
  8339. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Swapchain composition not supported!");
  8340. return false;
  8341. }
  8342. if (!VULKAN_SupportsPresentMode(driverData, window, presentMode)) {
  8343. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Present mode not supported!");
  8344. return false;
  8345. }
  8346. windowData->presentMode = presentMode;
  8347. windowData->swapchainComposition = swapchainComposition;
  8348. return VULKAN_INTERNAL_RecreateSwapchain(
  8349. (VulkanRenderer *)driverData,
  8350. windowData);
  8351. }
  8352. // Submission structure
  8353. static VulkanFenceHandle *VULKAN_INTERNAL_AcquireFenceFromPool(
  8354. VulkanRenderer *renderer)
  8355. {
  8356. VulkanFenceHandle *handle;
  8357. VkFenceCreateInfo fenceCreateInfo;
  8358. VkFence fence;
  8359. VkResult vulkanResult;
  8360. if (renderer->fencePool.availableFenceCount == 0) {
  8361. // Create fence
  8362. fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
  8363. fenceCreateInfo.pNext = NULL;
  8364. fenceCreateInfo.flags = 0;
  8365. vulkanResult = renderer->vkCreateFence(
  8366. renderer->logicalDevice,
  8367. &fenceCreateInfo,
  8368. NULL,
  8369. &fence);
  8370. if (vulkanResult != VK_SUCCESS) {
  8371. LogVulkanResultAsError("vkCreateFence", vulkanResult);
  8372. return NULL;
  8373. }
  8374. handle = SDL_malloc(sizeof(VulkanFenceHandle));
  8375. handle->fence = fence;
  8376. SDL_AtomicSet(&handle->referenceCount, 0);
  8377. return handle;
  8378. }
  8379. SDL_LockMutex(renderer->fencePool.lock);
  8380. handle = renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount - 1];
  8381. renderer->fencePool.availableFenceCount -= 1;
  8382. vulkanResult = renderer->vkResetFences(
  8383. renderer->logicalDevice,
  8384. 1,
  8385. &handle->fence);
  8386. if (vulkanResult != VK_SUCCESS) {
  8387. LogVulkanResultAsError("vkResetFences", vulkanResult);
  8388. }
  8389. SDL_UnlockMutex(renderer->fencePool.lock);
  8390. return handle;
  8391. }
/* Walks every deferred-destroy list and destroys each resource whose
 * reference count has dropped to zero. Resources still referenced (e.g. by
 * an in-flight command buffer) stay queued for a later pass.
 *
 * Each list uses the same pattern: iterate backwards so the swap-remove
 * (replace the destroyed slot with the last element) never skips or
 * re-visits an entry. Protected by disposeLock. */
static void VULKAN_INTERNAL_PerformPendingDestroys(
    VulkanRenderer *renderer)
{
    SDL_LockMutex(renderer->disposeLock);

    // Textures
    for (Sint32 i = renderer->texturesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_AtomicGet(&renderer->texturesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyTexture(
                renderer,
                renderer->texturesToDestroy[i]);
            renderer->texturesToDestroy[i] = renderer->texturesToDestroy[renderer->texturesToDestroyCount - 1];
            renderer->texturesToDestroyCount -= 1;
        }
    }

    // Buffers
    for (Sint32 i = renderer->buffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_AtomicGet(&renderer->buffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyBuffer(
                renderer,
                renderer->buffersToDestroy[i]);
            renderer->buffersToDestroy[i] = renderer->buffersToDestroy[renderer->buffersToDestroyCount - 1];
            renderer->buffersToDestroyCount -= 1;
        }
    }

    // Graphics pipelines
    for (Sint32 i = renderer->graphicsPipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_AtomicGet(&renderer->graphicsPipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyGraphicsPipeline(
                renderer,
                renderer->graphicsPipelinesToDestroy[i]);
            renderer->graphicsPipelinesToDestroy[i] = renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount - 1];
            renderer->graphicsPipelinesToDestroyCount -= 1;
        }
    }

    // Compute pipelines
    for (Sint32 i = renderer->computePipelinesToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_AtomicGet(&renderer->computePipelinesToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyComputePipeline(
                renderer,
                renderer->computePipelinesToDestroy[i]);
            renderer->computePipelinesToDestroy[i] = renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount - 1];
            renderer->computePipelinesToDestroyCount -= 1;
        }
    }

    // Shaders
    for (Sint32 i = renderer->shadersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_AtomicGet(&renderer->shadersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyShader(
                renderer,
                renderer->shadersToDestroy[i]);
            renderer->shadersToDestroy[i] = renderer->shadersToDestroy[renderer->shadersToDestroyCount - 1];
            renderer->shadersToDestroyCount -= 1;
        }
    }

    // Samplers
    for (Sint32 i = renderer->samplersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_AtomicGet(&renderer->samplersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroySampler(
                renderer,
                renderer->samplersToDestroy[i]);
            renderer->samplersToDestroy[i] = renderer->samplersToDestroy[renderer->samplersToDestroyCount - 1];
            renderer->samplersToDestroyCount -= 1;
        }
    }

    // Framebuffers
    for (Sint32 i = renderer->framebuffersToDestroyCount - 1; i >= 0; i -= 1) {
        if (SDL_AtomicGet(&renderer->framebuffersToDestroy[i]->referenceCount) == 0) {
            VULKAN_INTERNAL_DestroyFramebuffer(
                renderer,
                renderer->framebuffersToDestroy[i]);
            renderer->framebuffersToDestroy[i] = renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount - 1];
            renderer->framebuffersToDestroyCount -= 1;
        }
    }

    SDL_UnlockMutex(renderer->disposeLock);
}
/* Recycles a command buffer whose GPU work has completed: releases its
 * fence (unless the caller took ownership via SubmitAndAcquireFence),
 * returns descriptor sets and uniform buffers to their pools, drops all
 * resource reference counts it held, and returns the command buffer to its
 * command pool. Caller is expected to hold renderer->submitLock (this is
 * called from WaitForFences / Wait / Submit cleanup paths). */
static void VULKAN_INTERNAL_CleanCommandBuffer(
    VulkanRenderer *renderer,
    VulkanCommandBuffer *commandBuffer)
{
    Uint32 i;
    DescriptorSetData *descriptorSetData;

    // autoReleaseFence is cleared by SubmitAndAcquireFence, in which case
    // the caller owns the fence reference and releases it themselves.
    if (commandBuffer->autoReleaseFence) {
        VULKAN_ReleaseFence(
            (SDL_GPURenderer *)renderer,
            (SDL_GPUFence *)commandBuffer->inFlightFence);
        commandBuffer->inFlightFence = NULL;
    }

    // Bound descriptor sets are now available
    for (i = 0; i < commandBuffer->boundDescriptorSetDataCount; i += 1) {
        descriptorSetData = &commandBuffer->boundDescriptorSetDatas[i];
        SDL_LockMutex(descriptorSetData->descriptorSetPool->lock);
        // Grow the inactive-set array (doubling) if it is full.
        if (descriptorSetData->descriptorSetPool->inactiveDescriptorSetCount == descriptorSetData->descriptorSetPool->inactiveDescriptorSetCapacity) {
            descriptorSetData->descriptorSetPool->inactiveDescriptorSetCapacity *= 2;
            descriptorSetData->descriptorSetPool->inactiveDescriptorSets = SDL_realloc(
                descriptorSetData->descriptorSetPool->inactiveDescriptorSets,
                descriptorSetData->descriptorSetPool->inactiveDescriptorSetCapacity * sizeof(VkDescriptorSet));
        }
        descriptorSetData->descriptorSetPool->inactiveDescriptorSets[descriptorSetData->descriptorSetPool->inactiveDescriptorSetCount] = descriptorSetData->descriptorSet;
        descriptorSetData->descriptorSetPool->inactiveDescriptorSetCount += 1;
        SDL_UnlockMutex(descriptorSetData->descriptorSetPool->lock);
    }
    commandBuffer->boundDescriptorSetDataCount = 0;

    // Uniform buffers are now available
    SDL_LockMutex(renderer->acquireUniformBufferLock);
    for (i = 0; i < commandBuffer->usedUniformBufferCount; i += 1) {
        VULKAN_INTERNAL_ReturnUniformBufferToPool(
            renderer,
            commandBuffer->usedUniformBuffers[i]);
    }
    commandBuffer->usedUniformBufferCount = 0;
    SDL_UnlockMutex(renderer->acquireUniformBufferLock);

    // Decrement reference counts
    for (i = 0; i < commandBuffer->usedBufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedBuffers[i]->referenceCount);
    }
    commandBuffer->usedBufferCount = 0;
    for (i = 0; i < commandBuffer->usedTextureCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedTextures[i]->referenceCount);
    }
    commandBuffer->usedTextureCount = 0;
    for (i = 0; i < commandBuffer->usedSamplerCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedSamplers[i]->referenceCount);
    }
    commandBuffer->usedSamplerCount = 0;
    for (i = 0; i < commandBuffer->usedGraphicsPipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedGraphicsPipelines[i]->referenceCount);
    }
    commandBuffer->usedGraphicsPipelineCount = 0;
    for (i = 0; i < commandBuffer->usedComputePipelineCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedComputePipelines[i]->referenceCount);
    }
    commandBuffer->usedComputePipelineCount = 0;
    for (i = 0; i < commandBuffer->usedFramebufferCount; i += 1) {
        (void)SDL_AtomicDecRef(&commandBuffer->usedFramebuffers[i]->referenceCount);
    }
    commandBuffer->usedFramebufferCount = 0;

    // Reset presentation data
    commandBuffer->presentDataCount = 0;
    commandBuffer->waitSemaphoreCount = 0;
    commandBuffer->signalSemaphoreCount = 0;

    // Reset defrag state
    if (commandBuffer->isDefrag) {
        renderer->defragInProgress = 0;
    }

    // Return command buffer to pool
    SDL_LockMutex(renderer->acquireCommandBufferLock);
    if (commandBuffer->commandPool->inactiveCommandBufferCount == commandBuffer->commandPool->inactiveCommandBufferCapacity) {
        commandBuffer->commandPool->inactiveCommandBufferCapacity += 1;
        commandBuffer->commandPool->inactiveCommandBuffers = SDL_realloc(
            commandBuffer->commandPool->inactiveCommandBuffers,
            commandBuffer->commandPool->inactiveCommandBufferCapacity * sizeof(VulkanCommandBuffer *));
    }
    commandBuffer->commandPool->inactiveCommandBuffers[commandBuffer->commandPool->inactiveCommandBufferCount] = commandBuffer;
    commandBuffer->commandPool->inactiveCommandBufferCount += 1;
    SDL_UnlockMutex(renderer->acquireCommandBufferLock);

    // Remove this command buffer from the submitted list (swap-remove; no
    // break so duplicates would also be removed, though one entry is expected)
    for (i = 0; i < renderer->submittedCommandBufferCount; i += 1) {
        if (renderer->submittedCommandBuffers[i] == commandBuffer) {
            renderer->submittedCommandBuffers[i] = renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount - 1];
            renderer->submittedCommandBufferCount -= 1;
        }
    }
}
/* Blocks until the given fences are signaled (all of them if waitAll, any
 * one otherwise), then opportunistically recycles every submitted command
 * buffer whose fence has signaled and flushes deferred destroys. */
static void VULKAN_WaitForFences(
    SDL_GPURenderer *driverData,
    bool waitAll,
    SDL_GPUFence **pFences,
    Uint32 fenceCount)
{
    VulkanRenderer *renderer = (VulkanRenderer *)driverData;
    // Temporary array of raw VkFence handles for the Vulkan call.
    VkFence *fences = SDL_stack_alloc(VkFence, fenceCount);
    VkResult result;

    for (Uint32 i = 0; i < fenceCount; i += 1) {
        fences[i] = ((VulkanFenceHandle *)pFences[i])->fence;
    }

    result = renderer->vkWaitForFences(
        renderer->logicalDevice,
        fenceCount,
        fences,
        waitAll,
        UINT64_MAX); // no timeout
    if (result != VK_SUCCESS) {
        LogVulkanResultAsError("vkWaitForFences", result);
    }

    SDL_stack_free(fences);

    // Clean up any submitted command buffer that has finished, not just the
    // ones we waited on. Iterate backwards because CleanCommandBuffer
    // swap-removes entries from the submitted list.
    SDL_LockMutex(renderer->submitLock);
    for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
        result = renderer->vkGetFenceStatus(
            renderer->logicalDevice,
            renderer->submittedCommandBuffers[i]->inFlightFence->fence);
        if (result == VK_SUCCESS) {
            VULKAN_INTERNAL_CleanCommandBuffer(
                renderer,
                renderer->submittedCommandBuffers[i]);
        }
    }
    VULKAN_INTERNAL_PerformPendingDestroys(renderer);
    SDL_UnlockMutex(renderer->submitLock);
}
  8585. static void VULKAN_Wait(
  8586. SDL_GPURenderer *driverData)
  8587. {
  8588. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8589. VulkanCommandBuffer *commandBuffer;
  8590. VkResult result;
  8591. Sint32 i;
  8592. result = renderer->vkDeviceWaitIdle(renderer->logicalDevice);
  8593. if (result != VK_SUCCESS) {
  8594. LogVulkanResultAsError("vkDeviceWaitIdle", result);
  8595. return;
  8596. }
  8597. SDL_LockMutex(renderer->submitLock);
  8598. for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
  8599. commandBuffer = renderer->submittedCommandBuffers[i];
  8600. VULKAN_INTERNAL_CleanCommandBuffer(renderer, commandBuffer);
  8601. }
  8602. VULKAN_INTERNAL_PerformPendingDestroys(renderer);
  8603. SDL_UnlockMutex(renderer->submitLock);
  8604. }
  8605. static SDL_GPUFence *VULKAN_SubmitAndAcquireFence(
  8606. SDL_GPUCommandBuffer *commandBuffer)
  8607. {
  8608. VulkanCommandBuffer *vulkanCommandBuffer;
  8609. vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
  8610. vulkanCommandBuffer->autoReleaseFence = 0;
  8611. VULKAN_Submit(commandBuffer);
  8612. return (SDL_GPUFence *)vulkanCommandBuffer->inFlightFence;
  8613. }
// Ends the given command buffer, submits it to the unified queue with an
// in-flight fence from the pool, presents any pending swapchains, and then
// performs opportunistic cleanup: finished command buffers, empty memory
// allocations, pending destroys, and (when presenting) defragmentation.
// Serialized against other submissions via renderer->submitLock.
static void VULKAN_Submit(
    SDL_GPUCommandBuffer *commandBuffer)
{
    VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer *)commandBuffer;
    VulkanRenderer *renderer = (VulkanRenderer *)vulkanCommandBuffer->renderer;
    VkSubmitInfo submitInfo;
    VkPresentInfoKHR presentInfo;
    VulkanPresentData *presentData;
    VkResult vulkanResult, presentResult = VK_SUCCESS;
    VkPipelineStageFlags waitStages[MAX_PRESENT_COUNT];
    Uint32 swapchainImageIndex;
    VulkanTextureSubresource *swapchainTextureSubresource;
    Uint8 commandBufferCleaned = 0;
    VulkanMemorySubAllocator *allocator;
    bool presenting = false;

    SDL_LockMutex(renderer->submitLock);

    // FIXME: Can this just be permanent?
    // Every wait semaphore is waited on at the color-attachment-output stage.
    for (Uint32 i = 0; i < MAX_PRESENT_COUNT; i += 1) {
        waitStages[i] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    }

    // Transition each to-be-presented swapchain image to PRESENT usage
    // before the command buffer is ended.
    for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) {
        swapchainImageIndex = vulkanCommandBuffer->presentDatas[j].swapchainImageIndex;
        swapchainTextureSubresource = VULKAN_INTERNAL_FetchTextureSubresource(
            &vulkanCommandBuffer->presentDatas[j].windowData->swapchainData->textureContainers[swapchainImageIndex],
            0,
            0);

        VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
            renderer,
            vulkanCommandBuffer,
            VULKAN_TEXTURE_USAGE_MODE_PRESENT,
            swapchainTextureSubresource);
    }

    VULKAN_INTERNAL_EndCommandBuffer(renderer, vulkanCommandBuffer);

    vulkanCommandBuffer->inFlightFence = VULKAN_INTERNAL_AcquireFenceFromPool(renderer);

    // Command buffer has a reference to the in-flight fence
    (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount);

    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.pNext = NULL;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &vulkanCommandBuffer->commandBuffer;
    submitInfo.pWaitDstStageMask = waitStages;
    submitInfo.pWaitSemaphores = vulkanCommandBuffer->waitSemaphores;
    submitInfo.waitSemaphoreCount = vulkanCommandBuffer->waitSemaphoreCount;
    submitInfo.pSignalSemaphores = vulkanCommandBuffer->signalSemaphores;
    submitInfo.signalSemaphoreCount = vulkanCommandBuffer->signalSemaphoreCount;

    vulkanResult = renderer->vkQueueSubmit(
        renderer->unifiedQueue,
        1,
        &submitInfo,
        vulkanCommandBuffer->inFlightFence->fence);

    if (vulkanResult != VK_SUCCESS) {
        // NOTE(review): submission failure is only logged; the command buffer
        // is still recorded as submitted below.
        LogVulkanResultAsError("vkQueueSubmit", vulkanResult);
    }

    // Mark command buffers as submitted
    // NOTE(review): SDL_realloc result is not checked here — on OOM this
    // would dereference NULL; confirm against the project's OOM policy.
    if (renderer->submittedCommandBufferCount + 1 >= renderer->submittedCommandBufferCapacity) {
        renderer->submittedCommandBufferCapacity = renderer->submittedCommandBufferCount + 1;

        renderer->submittedCommandBuffers = SDL_realloc(
            renderer->submittedCommandBuffers,
            sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity);
    }

    renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount] = vulkanCommandBuffer;
    renderer->submittedCommandBufferCount += 1;

    // Present, if applicable
    for (Uint32 j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1) {
        presenting = true;

        presentData = &vulkanCommandBuffer->presentDatas[j];

        presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
        presentInfo.pNext = NULL;
        // Wait on the render-finished semaphore for the current frame slot.
        presentInfo.pWaitSemaphores =
            &presentData->windowData->swapchainData->renderFinishedSemaphore[presentData->windowData->swapchainData->frameCounter];
        presentInfo.waitSemaphoreCount = 1;
        presentInfo.pSwapchains = &presentData->windowData->swapchainData->swapchain;
        presentInfo.swapchainCount = 1;
        presentInfo.pImageIndices = &presentData->swapchainImageIndex;
        presentInfo.pResults = NULL;

        presentResult = renderer->vkQueuePresentKHR(
            renderer->unifiedQueue,
            &presentInfo);

        // Advance to the next frame-in-flight slot.
        presentData->windowData->swapchainData->frameCounter =
            (presentData->windowData->swapchainData->frameCounter + 1) % MAX_FRAMES_IN_FLIGHT;

        if (presentResult != VK_SUCCESS) {
            // Any non-success result (including VK_SUBOPTIMAL_KHR) triggers a
            // swapchain recreation here.
            VULKAN_INTERNAL_RecreateSwapchain(
                renderer,
                presentData->windowData);
        } else {
            // If presenting, the swapchain is using the in-flight fence
            // NOTE(review): the fence is stored at the post-increment frame
            // index — confirm this matches the acquire-side wait.
            presentData->windowData->swapchainData->inFlightFences[presentData->windowData->swapchainData->frameCounter] = vulkanCommandBuffer->inFlightFence;

            (void)SDL_AtomicIncRef(&vulkanCommandBuffer->inFlightFence->referenceCount);
        }
    }

    // Check if we can perform any cleanups
    // Iterates backwards because CleanCommandBuffer may remove entries.
    for (Sint32 i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1) {
        vulkanResult = renderer->vkGetFenceStatus(
            renderer->logicalDevice,
            renderer->submittedCommandBuffers[i]->inFlightFence->fence);

        if (vulkanResult == VK_SUCCESS) {
            VULKAN_INTERNAL_CleanCommandBuffer(
                renderer,
                renderer->submittedCommandBuffers[i]);

            commandBufferCleaned = 1;
        }
    }

    if (commandBufferCleaned) {
        SDL_LockMutex(renderer->allocatorLock);

        // Release device memory allocations that no longer hold any regions.
        for (Uint32 i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
            allocator = &renderer->memoryAllocator->subAllocators[i];

            for (Sint32 j = allocator->allocationCount - 1; j >= 0; j -= 1) {
                if (allocator->allocations[j]->usedRegionCount == 0) {
                    VULKAN_INTERNAL_DeallocateMemory(
                        renderer,
                        allocator,
                        j);
                }
            }
        }

        SDL_UnlockMutex(renderer->allocatorLock);
    }

    // Check pending destroys
    VULKAN_INTERNAL_PerformPendingDestroys(renderer);

    // Defrag!
    // Only one defrag pass at a time, and only on presenting submissions.
    if (
        presenting &&
        renderer->allocationsToDefragCount > 0 &&
        !renderer->defragInProgress) {
        VULKAN_INTERNAL_DefragmentMemory(renderer);
    }

    SDL_UnlockMutex(renderer->submitLock);
}
  8742. static Uint8 VULKAN_INTERNAL_DefragmentMemory(
  8743. VulkanRenderer *renderer)
  8744. {
  8745. VulkanMemoryAllocation *allocation;
  8746. VulkanMemoryUsedRegion *currentRegion;
  8747. VulkanBuffer *newBuffer;
  8748. VulkanTexture *newTexture;
  8749. VkBufferCopy bufferCopy;
  8750. VkImageCopy imageCopy;
  8751. VulkanCommandBuffer *commandBuffer;
  8752. VulkanTextureSubresource *srcSubresource;
  8753. VulkanTextureSubresource *dstSubresource;
  8754. Uint32 i, subresourceIndex;
  8755. SDL_LockMutex(renderer->allocatorLock);
  8756. renderer->defragInProgress = 1;
  8757. commandBuffer = (VulkanCommandBuffer *)VULKAN_AcquireCommandBuffer((SDL_GPURenderer *)renderer);
  8758. if (commandBuffer == NULL) {
  8759. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create defrag command buffer!");
  8760. return 0;
  8761. }
  8762. commandBuffer->isDefrag = 1;
  8763. allocation = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
  8764. renderer->allocationsToDefragCount -= 1;
  8765. /* For each used region in the allocation
  8766. * create a new resource, copy the data
  8767. * and re-point the resource containers
  8768. */
  8769. for (i = 0; i < allocation->usedRegionCount; i += 1) {
  8770. currentRegion = allocation->usedRegions[i];
  8771. if (currentRegion->isBuffer && !currentRegion->vulkanBuffer->markedForDestroy) {
  8772. currentRegion->vulkanBuffer->usageFlags |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
  8773. newBuffer = VULKAN_INTERNAL_CreateBuffer(
  8774. renderer,
  8775. currentRegion->vulkanBuffer->size,
  8776. currentRegion->vulkanBuffer->usageFlags,
  8777. currentRegion->vulkanBuffer->type);
  8778. if (newBuffer == NULL) {
  8779. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create defrag buffer!");
  8780. return 0;
  8781. }
  8782. if (
  8783. renderer->debugMode &&
  8784. renderer->supportsDebugUtils &&
  8785. currentRegion->vulkanBuffer->handle != NULL &&
  8786. currentRegion->vulkanBuffer->handle->container != NULL &&
  8787. currentRegion->vulkanBuffer->handle->container->debugName != NULL) {
  8788. VULKAN_INTERNAL_SetBufferName(
  8789. renderer,
  8790. newBuffer,
  8791. currentRegion->vulkanBuffer->handle->container->debugName);
  8792. }
  8793. // Copy buffer contents if necessary
  8794. if (
  8795. currentRegion->vulkanBuffer->type == VULKAN_BUFFER_TYPE_GPU && currentRegion->vulkanBuffer->transitioned) {
  8796. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  8797. renderer,
  8798. commandBuffer,
  8799. VULKAN_BUFFER_USAGE_MODE_COPY_SOURCE,
  8800. currentRegion->vulkanBuffer);
  8801. VULKAN_INTERNAL_BufferTransitionFromDefaultUsage(
  8802. renderer,
  8803. commandBuffer,
  8804. VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
  8805. newBuffer);
  8806. bufferCopy.srcOffset = 0;
  8807. bufferCopy.dstOffset = 0;
  8808. bufferCopy.size = currentRegion->resourceSize;
  8809. renderer->vkCmdCopyBuffer(
  8810. commandBuffer->commandBuffer,
  8811. currentRegion->vulkanBuffer->buffer,
  8812. newBuffer->buffer,
  8813. 1,
  8814. &bufferCopy);
  8815. VULKAN_INTERNAL_BufferTransitionToDefaultUsage(
  8816. renderer,
  8817. commandBuffer,
  8818. VULKAN_BUFFER_USAGE_MODE_COPY_DESTINATION,
  8819. newBuffer);
  8820. VULKAN_INTERNAL_TrackBuffer(commandBuffer, currentRegion->vulkanBuffer);
  8821. VULKAN_INTERNAL_TrackBuffer(commandBuffer, newBuffer);
  8822. }
  8823. // re-point original container to new buffer
  8824. if (currentRegion->vulkanBuffer->handle != NULL) {
  8825. newBuffer->handle = currentRegion->vulkanBuffer->handle;
  8826. newBuffer->handle->vulkanBuffer = newBuffer;
  8827. currentRegion->vulkanBuffer->handle = NULL;
  8828. }
  8829. VULKAN_INTERNAL_ReleaseBuffer(renderer, currentRegion->vulkanBuffer);
  8830. } else if (!currentRegion->isBuffer && !currentRegion->vulkanTexture->markedForDestroy) {
  8831. newTexture = VULKAN_INTERNAL_CreateTexture(
  8832. renderer,
  8833. currentRegion->vulkanTexture->dimensions.width,
  8834. currentRegion->vulkanTexture->dimensions.height,
  8835. currentRegion->vulkanTexture->depth,
  8836. currentRegion->vulkanTexture->type,
  8837. currentRegion->vulkanTexture->layerCount,
  8838. currentRegion->vulkanTexture->levelCount,
  8839. currentRegion->vulkanTexture->sampleCount,
  8840. currentRegion->vulkanTexture->format,
  8841. currentRegion->vulkanTexture->swizzle,
  8842. currentRegion->vulkanTexture->aspectFlags,
  8843. currentRegion->vulkanTexture->usageFlags,
  8844. currentRegion->vulkanTexture->isMSAAColorTarget);
  8845. if (newTexture == NULL) {
  8846. SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create defrag texture!");
  8847. return 0;
  8848. }
  8849. for (subresourceIndex = 0; subresourceIndex < currentRegion->vulkanTexture->subresourceCount; subresourceIndex += 1) {
  8850. // copy subresource if necessary
  8851. srcSubresource = &currentRegion->vulkanTexture->subresources[subresourceIndex];
  8852. dstSubresource = &newTexture->subresources[subresourceIndex];
  8853. // Set debug name if it exists
  8854. if (
  8855. renderer->debugMode &&
  8856. renderer->supportsDebugUtils &&
  8857. srcSubresource->parent->handle != NULL &&
  8858. srcSubresource->parent->handle->container != NULL &&
  8859. srcSubresource->parent->handle->container->debugName != NULL) {
  8860. VULKAN_INTERNAL_SetTextureName(
  8861. renderer,
  8862. currentRegion->vulkanTexture,
  8863. srcSubresource->parent->handle->container->debugName);
  8864. }
  8865. if (srcSubresource->transitioned) {
  8866. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  8867. renderer,
  8868. commandBuffer,
  8869. VULKAN_TEXTURE_USAGE_MODE_COPY_SOURCE,
  8870. srcSubresource);
  8871. VULKAN_INTERNAL_TextureSubresourceTransitionFromDefaultUsage(
  8872. renderer,
  8873. commandBuffer,
  8874. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  8875. dstSubresource);
  8876. imageCopy.srcOffset.x = 0;
  8877. imageCopy.srcOffset.y = 0;
  8878. imageCopy.srcOffset.z = 0;
  8879. imageCopy.srcSubresource.aspectMask = srcSubresource->parent->aspectFlags;
  8880. imageCopy.srcSubresource.baseArrayLayer = srcSubresource->layer;
  8881. imageCopy.srcSubresource.layerCount = 1;
  8882. imageCopy.srcSubresource.mipLevel = srcSubresource->level;
  8883. imageCopy.extent.width = SDL_max(1, srcSubresource->parent->dimensions.width >> srcSubresource->level);
  8884. imageCopy.extent.height = SDL_max(1, srcSubresource->parent->dimensions.height >> srcSubresource->level);
  8885. imageCopy.extent.depth = srcSubresource->parent->depth;
  8886. imageCopy.dstOffset.x = 0;
  8887. imageCopy.dstOffset.y = 0;
  8888. imageCopy.dstOffset.z = 0;
  8889. imageCopy.dstSubresource.aspectMask = dstSubresource->parent->aspectFlags;
  8890. imageCopy.dstSubresource.baseArrayLayer = dstSubresource->layer;
  8891. imageCopy.dstSubresource.layerCount = 1;
  8892. imageCopy.dstSubresource.mipLevel = dstSubresource->level;
  8893. renderer->vkCmdCopyImage(
  8894. commandBuffer->commandBuffer,
  8895. currentRegion->vulkanTexture->image,
  8896. VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
  8897. newTexture->image,
  8898. VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
  8899. 1,
  8900. &imageCopy);
  8901. VULKAN_INTERNAL_TextureSubresourceTransitionToDefaultUsage(
  8902. renderer,
  8903. commandBuffer,
  8904. VULKAN_TEXTURE_USAGE_MODE_COPY_DESTINATION,
  8905. dstSubresource);
  8906. VULKAN_INTERNAL_TrackTexture(commandBuffer, srcSubresource->parent);
  8907. VULKAN_INTERNAL_TrackTexture(commandBuffer, dstSubresource->parent);
  8908. }
  8909. }
  8910. // re-point original container to new texture
  8911. newTexture->handle = currentRegion->vulkanTexture->handle;
  8912. newTexture->handle->vulkanTexture = newTexture;
  8913. currentRegion->vulkanTexture->handle = NULL;
  8914. VULKAN_INTERNAL_ReleaseTexture(renderer, currentRegion->vulkanTexture);
  8915. }
  8916. }
  8917. SDL_UnlockMutex(renderer->allocatorLock);
  8918. VULKAN_Submit(
  8919. (SDL_GPUCommandBuffer *)commandBuffer);
  8920. return 1;
  8921. }
  8922. // Format Info
  8923. static bool VULKAN_SupportsTextureFormat(
  8924. SDL_GPURenderer *driverData,
  8925. SDL_GPUTextureFormat format,
  8926. SDL_GPUTextureType type,
  8927. SDL_GPUTextureUsageFlags usage)
  8928. {
  8929. VulkanRenderer *renderer = (VulkanRenderer *)driverData;
  8930. VkFormat vulkanFormat = SDLToVK_SurfaceFormat[format];
  8931. VkImageUsageFlags vulkanUsage = 0;
  8932. VkImageCreateFlags createFlags = 0;
  8933. VkImageFormatProperties properties;
  8934. VkResult vulkanResult;
  8935. if (usage & SDL_GPU_TEXTUREUSAGE_SAMPLER_BIT) {
  8936. vulkanUsage |= VK_IMAGE_USAGE_SAMPLED_BIT;
  8937. }
  8938. if (usage & SDL_GPU_TEXTUREUSAGE_COLOR_TARGET_BIT) {
  8939. vulkanUsage |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  8940. }
  8941. if (usage & SDL_GPU_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT) {
  8942. vulkanUsage |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  8943. }
  8944. if (usage & (SDL_GPU_TEXTUREUSAGE_GRAPHICS_STORAGE_READ_BIT |
  8945. SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_READ_BIT |
  8946. SDL_GPU_TEXTUREUSAGE_COMPUTE_STORAGE_WRITE_BIT)) {
  8947. vulkanUsage |= VK_IMAGE_USAGE_STORAGE_BIT;
  8948. }
  8949. if (type == SDL_GPU_TEXTURETYPE_CUBE) {
  8950. createFlags = VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
  8951. }
  8952. vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
  8953. renderer->physicalDevice,
  8954. vulkanFormat,
  8955. (type == SDL_GPU_TEXTURETYPE_3D) ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D,
  8956. VK_IMAGE_TILING_OPTIMAL,
  8957. vulkanUsage,
  8958. createFlags,
  8959. &properties);
  8960. return vulkanResult == VK_SUCCESS;
  8961. }
  8962. // Device instantiation
  8963. static inline Uint8 CheckDeviceExtensions(
  8964. VkExtensionProperties *extensions,
  8965. Uint32 numExtensions,
  8966. VulkanExtensions *supports)
  8967. {
  8968. Uint32 i;
  8969. SDL_memset(supports, '\0', sizeof(VulkanExtensions));
  8970. for (i = 0; i < numExtensions; i += 1) {
  8971. const char *name = extensions[i].extensionName;
  8972. #define CHECK(ext) \
  8973. if (SDL_strcmp(name, "VK_" #ext) == 0) { \
  8974. supports->ext = 1; \
  8975. }
  8976. CHECK(KHR_swapchain)
  8977. else CHECK(KHR_maintenance1) else CHECK(KHR_driver_properties) else CHECK(EXT_vertex_attribute_divisor) else CHECK(KHR_portability_subset)
  8978. #undef CHECK
  8979. }
  8980. return (supports->KHR_swapchain &&
  8981. supports->KHR_maintenance1);
  8982. }
  8983. static inline Uint32 GetDeviceExtensionCount(VulkanExtensions *supports)
  8984. {
  8985. return (
  8986. supports->KHR_swapchain +
  8987. supports->KHR_maintenance1 +
  8988. supports->KHR_driver_properties +
  8989. supports->EXT_vertex_attribute_divisor +
  8990. supports->KHR_portability_subset);
  8991. }
  8992. static inline void CreateDeviceExtensionArray(
  8993. VulkanExtensions *supports,
  8994. const char **extensions)
  8995. {
  8996. Uint8 cur = 0;
  8997. #define CHECK(ext) \
  8998. if (supports->ext) { \
  8999. extensions[cur++] = "VK_" #ext; \
  9000. }
  9001. CHECK(KHR_swapchain)
  9002. CHECK(KHR_maintenance1)
  9003. CHECK(KHR_driver_properties)
  9004. CHECK(EXT_vertex_attribute_divisor)
  9005. CHECK(KHR_portability_subset)
  9006. #undef CHECK
  9007. }
  9008. static inline Uint8 SupportsInstanceExtension(
  9009. const char *ext,
  9010. VkExtensionProperties *availableExtensions,
  9011. Uint32 numAvailableExtensions)
  9012. {
  9013. Uint32 i;
  9014. for (i = 0; i < numAvailableExtensions; i += 1) {
  9015. if (SDL_strcmp(ext, availableExtensions[i].extensionName) == 0) {
  9016. return 1;
  9017. }
  9018. }
  9019. return 0;
  9020. }
  9021. static Uint8 VULKAN_INTERNAL_CheckInstanceExtensions(
  9022. const char **requiredExtensions,
  9023. Uint32 requiredExtensionsLength,
  9024. bool *supportsDebugUtils,
  9025. bool *supportsColorspace)
  9026. {
  9027. Uint32 extensionCount, i;
  9028. VkExtensionProperties *availableExtensions;
  9029. Uint8 allExtensionsSupported = 1;
  9030. vkEnumerateInstanceExtensionProperties(
  9031. NULL,
  9032. &extensionCount,
  9033. NULL);
  9034. availableExtensions = SDL_malloc(
  9035. extensionCount * sizeof(VkExtensionProperties));
  9036. vkEnumerateInstanceExtensionProperties(
  9037. NULL,
  9038. &extensionCount,
  9039. availableExtensions);
  9040. for (i = 0; i < requiredExtensionsLength; i += 1) {
  9041. if (!SupportsInstanceExtension(
  9042. requiredExtensions[i],
  9043. availableExtensions,
  9044. extensionCount)) {
  9045. allExtensionsSupported = 0;
  9046. break;
  9047. }
  9048. }
  9049. // This is optional, but nice to have!
  9050. *supportsDebugUtils = SupportsInstanceExtension(
  9051. VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
  9052. availableExtensions,
  9053. extensionCount);
  9054. // Also optional and nice to have!
  9055. *supportsColorspace = SupportsInstanceExtension(
  9056. VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME,
  9057. availableExtensions,
  9058. extensionCount);
  9059. SDL_free(availableExtensions);
  9060. return allExtensionsSupported;
  9061. }
  9062. static Uint8 VULKAN_INTERNAL_CheckDeviceExtensions(
  9063. VulkanRenderer *renderer,
  9064. VkPhysicalDevice physicalDevice,
  9065. VulkanExtensions *physicalDeviceExtensions)
  9066. {
  9067. Uint32 extensionCount;
  9068. VkExtensionProperties *availableExtensions;
  9069. Uint8 allExtensionsSupported;
  9070. renderer->vkEnumerateDeviceExtensionProperties(
  9071. physicalDevice,
  9072. NULL,
  9073. &extensionCount,
  9074. NULL);
  9075. availableExtensions = (VkExtensionProperties *)SDL_malloc(
  9076. extensionCount * sizeof(VkExtensionProperties));
  9077. renderer->vkEnumerateDeviceExtensionProperties(
  9078. physicalDevice,
  9079. NULL,
  9080. &extensionCount,
  9081. availableExtensions);
  9082. allExtensionsSupported = CheckDeviceExtensions(
  9083. availableExtensions,
  9084. extensionCount,
  9085. physicalDeviceExtensions);
  9086. SDL_free(availableExtensions);
  9087. return allExtensionsSupported;
  9088. }
  9089. static Uint8 VULKAN_INTERNAL_CheckValidationLayers(
  9090. const char **validationLayers,
  9091. Uint32 validationLayersLength)
  9092. {
  9093. Uint32 layerCount;
  9094. VkLayerProperties *availableLayers;
  9095. Uint32 i, j;
  9096. Uint8 layerFound = 0;
  9097. vkEnumerateInstanceLayerProperties(&layerCount, NULL);
  9098. availableLayers = (VkLayerProperties *)SDL_malloc(
  9099. layerCount * sizeof(VkLayerProperties));
  9100. vkEnumerateInstanceLayerProperties(&layerCount, availableLayers);
  9101. for (i = 0; i < validationLayersLength; i += 1) {
  9102. layerFound = 0;
  9103. for (j = 0; j < layerCount; j += 1) {
  9104. if (SDL_strcmp(validationLayers[i], availableLayers[j].layerName) == 0) {
  9105. layerFound = 1;
  9106. break;
  9107. }
  9108. }
  9109. if (!layerFound) {
  9110. break;
  9111. }
  9112. }
  9113. SDL_free(availableLayers);
  9114. return layerFound;
  9115. }
// Creates the VkInstance: gathers the platform's required instance
// extensions from SDL, appends the optional extensions this backend can use,
// and enables the Khronos validation layer in debug mode when available.
// Returns 1 on success, 0 on failure.
static Uint8 VULKAN_INTERNAL_CreateInstance(VulkanRenderer *renderer)
{
    VkResult vulkanResult;
    VkApplicationInfo appInfo;
    VkInstanceCreateFlags createFlags;
    const char *const *originalInstanceExtensionNames;
    const char **instanceExtensionNames;
    Uint32 instanceExtensionCount;
    VkInstanceCreateInfo createInfo;
    static const char *layerNames[] = { "VK_LAYER_KHRONOS_validation" };

    appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
    appInfo.pNext = NULL;
    appInfo.pApplicationName = NULL;
    appInfo.applicationVersion = 0;
    appInfo.pEngineName = "SDLGPU";
    appInfo.engineVersion = SDL_VERSION;
    // Target the Vulkan 1.0 API.
    appInfo.apiVersion = VK_MAKE_VERSION(1, 0, 0);

    createFlags = 0;

    // Ask SDL which extensions the window system requires.
    originalInstanceExtensionNames = SDL_Vulkan_GetInstanceExtensions(&instanceExtensionCount);
    if (!originalInstanceExtensionNames) {
        SDL_LogError(
            SDL_LOG_CATEGORY_GPU,
            "SDL_Vulkan_GetInstanceExtensions(): getExtensionCount: %s",
            SDL_GetError());
        return 0;
    }

    /* Extra space for the following extensions:
     * VK_KHR_get_physical_device_properties2
     * VK_EXT_swapchain_colorspace
     * VK_EXT_debug_utils
     * VK_KHR_portability_enumeration
     */
    instanceExtensionNames = SDL_stack_alloc(
        const char *,
        instanceExtensionCount + 4);
    SDL_memcpy((void *)instanceExtensionNames, originalInstanceExtensionNames, instanceExtensionCount * sizeof(const char *));

    // Core since 1.1
    instanceExtensionNames[instanceExtensionCount++] =
        VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;

#ifdef SDL_PLATFORM_APPLE
    // MoltenVK requires portability enumeration to be requested explicitly.
    instanceExtensionNames[instanceExtensionCount++] =
        VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME;
    createFlags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
#endif

    // Verify the required extensions; this also probes the two optional ones.
    if (!VULKAN_INTERNAL_CheckInstanceExtensions(
            instanceExtensionNames,
            instanceExtensionCount,
            &renderer->supportsDebugUtils,
            &renderer->supportsColorspace)) {
        SDL_LogError(
            SDL_LOG_CATEGORY_GPU,
            "Required Vulkan instance extensions not supported");
        SDL_stack_free((char *)instanceExtensionNames);
        return 0;
    }

    if (renderer->supportsDebugUtils) {
        // Append the debug extension
        instanceExtensionNames[instanceExtensionCount++] =
            VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
    } else {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "%s is not supported!",
            VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
    }

    if (renderer->supportsColorspace) {
        // Append colorspace extension
        instanceExtensionNames[instanceExtensionCount++] =
            VK_EXT_SWAPCHAIN_COLOR_SPACE_EXTENSION_NAME;
    }

    createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    createInfo.pNext = NULL;
    createInfo.flags = createFlags;
    createInfo.pApplicationInfo = &appInfo;
    createInfo.ppEnabledLayerNames = layerNames;
    createInfo.enabledExtensionCount = instanceExtensionCount;
    createInfo.ppEnabledExtensionNames = instanceExtensionNames;

    // Validation layers are enabled only in debug mode, and only when
    // actually installed; otherwise continue without them.
    if (renderer->debugMode) {
        createInfo.enabledLayerCount = SDL_arraysize(layerNames);
        if (!VULKAN_INTERNAL_CheckValidationLayers(
                layerNames,
                createInfo.enabledLayerCount)) {
            SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Validation layers not found, continuing without validation");
            createInfo.enabledLayerCount = 0;
        } else {
            SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "Validation layers enabled, expect debug level performance!");
        }
    } else {
        createInfo.enabledLayerCount = 0;
    }

    vulkanResult = vkCreateInstance(&createInfo, NULL, &renderer->instance);
    if (vulkanResult != VK_SUCCESS) {
        SDL_LogError(
            SDL_LOG_CATEGORY_GPU,
            "vkCreateInstance failed: %s",
            VkErrorMessages(vulkanResult));
        SDL_stack_free((char *)instanceExtensionNames);
        return 0;
    }

    SDL_stack_free((char *)instanceExtensionNames);
    return 1;
}
// Determines whether a physical device is usable and how it ranks against
// the best device seen so far.
//
// *deviceRank is an in/out parameter: on entry it holds the best rank found
// so far; on exit it holds this device's rank (or 0 if this device is
// outranked). Returns 1 and sets *queueFamilyIndex when the device is
// suitable; returns 0 otherwise.
static Uint8 VULKAN_INTERNAL_IsDeviceSuitable(
    VulkanRenderer *renderer,
    VkPhysicalDevice physicalDevice,
    VulkanExtensions *physicalDeviceExtensions,
    Uint32 *queueFamilyIndex,
    Uint8 *deviceRank)
{
    Uint32 queueFamilyCount, queueFamilyRank, queueFamilyBest;
    VkQueueFamilyProperties *queueProps;
    bool supportsPresent;
    VkPhysicalDeviceProperties deviceProperties;
    Uint32 i;

    // Rank table selected by the user's low-power preference, indexed by
    // VkPhysicalDeviceType.
    const Uint8 *devicePriority = renderer->preferLowPower ? DEVICE_PRIORITY_LOWPOWER : DEVICE_PRIORITY_HIGHPERFORMANCE;

    /* Get the device rank before doing any checks, in case one fails.
     * Note: If no dedicated device exists, one that supports our features
     * would be fine
     */
    renderer->vkGetPhysicalDeviceProperties(
        physicalDevice,
        &deviceProperties);
    if (*deviceRank < devicePriority[deviceProperties.deviceType]) {
        /* This device outranks the best device we've found so far!
         * This includes a dedicated GPU that has less features than an
         * integrated GPU, because this is a freak case that is almost
         * never intentionally desired by the end user
         */
        *deviceRank = devicePriority[deviceProperties.deviceType];
    } else if (*deviceRank > devicePriority[deviceProperties.deviceType]) {
        /* Device is outranked by a previous device, don't even try to
         * run a query and reset the rank to avoid overwrites
         */
        *deviceRank = 0;
        return 0;
    }

    // The device must expose the mandatory device extensions.
    if (!VULKAN_INTERNAL_CheckDeviceExtensions(
            renderer,
            physicalDevice,
            physicalDeviceExtensions)) {
        return 0;
    }

    // First call yields the count, second call fills the array.
    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        NULL);

    queueProps = (VkQueueFamilyProperties *)SDL_stack_alloc(
        VkQueueFamilyProperties,
        queueFamilyCount);
    renderer->vkGetPhysicalDeviceQueueFamilyProperties(
        physicalDevice,
        &queueFamilyCount,
        queueProps);

    queueFamilyBest = 0;
    // UINT32_MAX is the "no graphics family found" sentinel.
    *queueFamilyIndex = UINT32_MAX;
    for (i = 0; i < queueFamilyCount; i += 1) {
        supportsPresent = SDL_Vulkan_GetPresentationSupport(
            renderer->instance,
            physicalDevice,
            i);
        if (!supportsPresent ||
            !(queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)) {
            // Not a graphics family, ignore.
            continue;
        }

        /* The queue family bitflags are kind of annoying.
         *
         * We of course need a graphics family, but we ideally want the
         * _primary_ graphics family. The spec states that at least one
         * graphics family must also be a compute family, so generally
         * drivers make that the first one. But hey, maybe something
         * genuinely can't do compute or something, and FNA doesn't
         * need it, so we'll be open to a non-compute queue family.
         *
         * Additionally, it's common to see the primary queue family
         * have the transfer bit set, which is great! But this is
         * actually optional; it's impossible to NOT have transfers in
         * graphics/compute but it _is_ possible for a graphics/compute
         * family, even the primary one, to just decide not to set the
         * bitflag. Admittedly, a driver may want to isolate transfer
         * queues to a dedicated family so that queues made solely for
         * transfers can have an optimized DMA queue.
         *
         * That, or the driver author got lazy and decided not to set
         * the bit. Looking at you, Android.
         *
         * -flibit
         */
        if (queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) {
            if (queueProps[i].queueFlags & VK_QUEUE_TRANSFER_BIT) {
                // Has all attribs!
                queueFamilyRank = 3;
            } else {
                // Probably has a DMA transfer queue family
                queueFamilyRank = 2;
            }
        } else {
            // Just a graphics family, probably has something better
            queueFamilyRank = 1;
        }
        if (queueFamilyRank > queueFamilyBest) {
            *queueFamilyIndex = i;
            queueFamilyBest = queueFamilyRank;
        }
    }

    SDL_stack_free(queueProps);

    if (*queueFamilyIndex == UINT32_MAX) {
        // Somehow no graphics queues existed. Compute-only device?
        return 0;
    }

    // FIXME: Need better structure for checking vs storing swapchain support details
    return 1;
}
// Enumerates all physical devices, picks the highest-ranked suitable one,
// and stores it (with its extension support, queue family index, properties,
// and memory properties) in the renderer. Returns 1 on success, 0 when no
// usable device is found or enumeration fails.
static Uint8 VULKAN_INTERNAL_DeterminePhysicalDevice(VulkanRenderer *renderer)
{
    VkResult vulkanResult;
    VkPhysicalDevice *physicalDevices;
    VulkanExtensions *physicalDeviceExtensions;
    // Note: suitableIndex is Uint32 but uses -1 (wrapped to UINT32_MAX)
    // as its "none found" sentinel; the != -1 comparison below works via
    // the usual arithmetic conversions.
    Uint32 physicalDeviceCount, i, suitableIndex;
    Uint32 queueFamilyIndex, suitableQueueFamilyIndex;
    Uint8 deviceRank, highestRank;

    // First call yields the count, second call fills the array.
    vulkanResult = renderer->vkEnumeratePhysicalDevices(
        renderer->instance,
        &physicalDeviceCount,
        NULL);
    VULKAN_ERROR_CHECK(vulkanResult, vkEnumeratePhysicalDevices, 0)

    if (physicalDeviceCount == 0) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Failed to find any GPUs with Vulkan support");
        return 0;
    }

    physicalDevices = SDL_stack_alloc(VkPhysicalDevice, physicalDeviceCount);
    physicalDeviceExtensions = SDL_stack_alloc(VulkanExtensions, physicalDeviceCount);

    vulkanResult = renderer->vkEnumeratePhysicalDevices(
        renderer->instance,
        &physicalDeviceCount,
        physicalDevices);

    /* This should be impossible to hit, but from what I can tell this can
     * be triggered not because the array is too small, but because there
     * were drivers that turned out to be bogus, so this is the loader's way
     * of telling us that the list is now smaller than expected :shrug:
     */
    if (vulkanResult == VK_INCOMPLETE) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkEnumeratePhysicalDevices returned VK_INCOMPLETE, will keep trying anyway...");
        vulkanResult = VK_SUCCESS;
    }

    if (vulkanResult != VK_SUCCESS) {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "vkEnumeratePhysicalDevices failed: %s",
            VkErrorMessages(vulkanResult));
        SDL_stack_free(physicalDevices);
        SDL_stack_free(physicalDeviceExtensions);
        return 0;
    }

    // Any suitable device will do, but we'd like the best
    suitableIndex = -1;
    suitableQueueFamilyIndex = 0;
    highestRank = 0;
    for (i = 0; i < physicalDeviceCount; i += 1) {
        // Seed the in/out rank with the best seen so far so IsDeviceSuitable
        // can skip outranked devices early.
        deviceRank = highestRank;
        if (VULKAN_INTERNAL_IsDeviceSuitable(
                renderer,
                physicalDevices[i],
                &physicalDeviceExtensions[i],
                &queueFamilyIndex,
                &deviceRank)) {
            /* Use this for rendering.
             * Note that this may override a previous device that
             * supports rendering, but shares the same device rank.
             */
            suitableIndex = i;
            suitableQueueFamilyIndex = queueFamilyIndex;
            highestRank = deviceRank;
        } else if (deviceRank > highestRank) {
            /* In this case, we found a... "realer?" GPU,
             * but it doesn't actually support our Vulkan.
             * We should disqualify all devices below as a
             * result, because if we don't we end up
             * ignoring real hardware and risk using
             * something like LLVMpipe instead!
             * -flibit
             */
            suitableIndex = -1;
            highestRank = deviceRank;
        }
    }

    if (suitableIndex != -1) {
        renderer->supports = physicalDeviceExtensions[suitableIndex];
        renderer->physicalDevice = physicalDevices[suitableIndex];
        renderer->queueFamilyIndex = suitableQueueFamilyIndex;
    } else {
        SDL_stack_free(physicalDevices);
        SDL_stack_free(physicalDeviceExtensions);
        return 0;
    }

    // Query device properties, chaining in driver properties when the
    // KHR_driver_properties extension is available.
    renderer->physicalDeviceProperties.sType =
        VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
    if (renderer->supports.KHR_driver_properties) {
        renderer->physicalDeviceDriverProperties.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
        renderer->physicalDeviceDriverProperties.pNext = NULL;

        renderer->physicalDeviceProperties.pNext =
            &renderer->physicalDeviceDriverProperties;

        renderer->vkGetPhysicalDeviceProperties2KHR(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties);
    } else {
        renderer->physicalDeviceProperties.pNext = NULL;

        renderer->vkGetPhysicalDeviceProperties(
            renderer->physicalDevice,
            &renderer->physicalDeviceProperties.properties);
    }

    renderer->vkGetPhysicalDeviceMemoryProperties(
        renderer->physicalDevice,
        &renderer->memoryProperties);

    SDL_stack_free(physicalDevices);
    SDL_stack_free(physicalDeviceExtensions);
    return 1;
}
  9435. static Uint8 VULKAN_INTERNAL_CreateLogicalDevice(
  9436. VulkanRenderer *renderer)
  9437. {
  9438. VkResult vulkanResult;
  9439. VkDeviceCreateInfo deviceCreateInfo;
  9440. VkPhysicalDeviceFeatures desiredDeviceFeatures;
  9441. VkPhysicalDeviceFeatures haveDeviceFeatures;
  9442. VkPhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
  9443. const char **deviceExtensions;
  9444. VkDeviceQueueCreateInfo queueCreateInfo;
  9445. float queuePriority = 1.0f;
  9446. queueCreateInfo.sType =
  9447. VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  9448. queueCreateInfo.pNext = NULL;
  9449. queueCreateInfo.flags = 0;
  9450. queueCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
  9451. queueCreateInfo.queueCount = 1;
  9452. queueCreateInfo.pQueuePriorities = &queuePriority;
  9453. // check feature support
  9454. renderer->vkGetPhysicalDeviceFeatures(
  9455. renderer->physicalDevice,
  9456. &haveDeviceFeatures);
  9457. // specifying used device features
  9458. SDL_zero(desiredDeviceFeatures);
  9459. desiredDeviceFeatures.independentBlend = VK_TRUE;
  9460. desiredDeviceFeatures.samplerAnisotropy = VK_TRUE;
  9461. if (haveDeviceFeatures.fillModeNonSolid) {
  9462. desiredDeviceFeatures.fillModeNonSolid = VK_TRUE;
  9463. renderer->supportsFillModeNonSolid = true;
  9464. }
  9465. if (haveDeviceFeatures.multiDrawIndirect) {
  9466. desiredDeviceFeatures.multiDrawIndirect = VK_TRUE;
  9467. renderer->supportsMultiDrawIndirect = true;
  9468. }
  9469. // creating the logical device
  9470. deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
  9471. if (renderer->supports.KHR_portability_subset) {
  9472. portabilityFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR;
  9473. portabilityFeatures.pNext = NULL;
  9474. portabilityFeatures.constantAlphaColorBlendFactors = VK_FALSE;
  9475. portabilityFeatures.events = VK_FALSE;
  9476. portabilityFeatures.imageViewFormatReinterpretation = VK_FALSE;
  9477. portabilityFeatures.imageViewFormatSwizzle = VK_TRUE;
  9478. portabilityFeatures.imageView2DOn3DImage = VK_FALSE;
  9479. portabilityFeatures.multisampleArrayImage = VK_FALSE;
  9480. portabilityFeatures.mutableComparisonSamplers = VK_FALSE;
  9481. portabilityFeatures.pointPolygons = VK_FALSE;
  9482. portabilityFeatures.samplerMipLodBias = VK_FALSE; // Technically should be true, but eh
  9483. portabilityFeatures.separateStencilMaskRef = VK_FALSE;
  9484. portabilityFeatures.shaderSampleRateInterpolationFunctions = VK_FALSE;
  9485. portabilityFeatures.tessellationIsolines = VK_FALSE;
  9486. portabilityFeatures.tessellationPointMode = VK_FALSE;
  9487. portabilityFeatures.triangleFans = VK_FALSE;
  9488. portabilityFeatures.vertexAttributeAccessBeyondStride = VK_FALSE;
  9489. deviceCreateInfo.pNext = &portabilityFeatures;
  9490. } else {
  9491. deviceCreateInfo.pNext = NULL;
  9492. }
  9493. deviceCreateInfo.flags = 0;
  9494. deviceCreateInfo.queueCreateInfoCount = 1;
  9495. deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo;
  9496. deviceCreateInfo.enabledLayerCount = 0;
  9497. deviceCreateInfo.ppEnabledLayerNames = NULL;
  9498. deviceCreateInfo.enabledExtensionCount = GetDeviceExtensionCount(
  9499. &renderer->supports);
  9500. deviceExtensions = SDL_stack_alloc(
  9501. const char *,
  9502. deviceCreateInfo.enabledExtensionCount);
  9503. CreateDeviceExtensionArray(&renderer->supports, deviceExtensions);
  9504. deviceCreateInfo.ppEnabledExtensionNames = deviceExtensions;
  9505. deviceCreateInfo.pEnabledFeatures = &desiredDeviceFeatures;
  9506. vulkanResult = renderer->vkCreateDevice(
  9507. renderer->physicalDevice,
  9508. &deviceCreateInfo,
  9509. NULL,
  9510. &renderer->logicalDevice);
  9511. SDL_stack_free((void *)deviceExtensions);
  9512. VULKAN_ERROR_CHECK(vulkanResult, vkCreateDevice, 0)
  9513. // Load vkDevice entry points
  9514. #define VULKAN_DEVICE_FUNCTION(func) \
  9515. renderer->func = (PFN_##func) \
  9516. renderer->vkGetDeviceProcAddr( \
  9517. renderer->logicalDevice, \
  9518. #func);
  9519. #include "SDL_gpu_vulkan_vkfuncs.h"
  9520. renderer->vkGetDeviceQueue(
  9521. renderer->logicalDevice,
  9522. renderer->queueFamilyIndex,
  9523. 0,
  9524. &renderer->unifiedQueue);
  9525. return 1;
  9526. }
/* Loads the Vulkan loader library and resolves the global (pre-instance)
 * entry points. On failure it logs a warning and returns early, leaving
 * vkGetInstanceProcAddr and/or the global function pointers NULL; callers
 * discover this through subsequent failures (e.g. instance creation).
 */
static void VULKAN_INTERNAL_LoadEntryPoints(void)
{
    // Required for MoltenVK support
    SDL_setenv("MVK_CONFIG_FULL_IMAGE_VIEW_SWIZZLE", "1", 1);

    // Load Vulkan entry points
    if (!SDL_Vulkan_LoadLibrary(NULL)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: SDL_Vulkan_LoadLibrary failed!");
        return;
    }

    /* The object-pointer -> function-pointer cast below trips -Wpedantic
     * on GCC, hence the temporary diagnostic suppression. */
#ifdef HAVE_GCC_DIAGNOSTIC_PRAGMA
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
#endif
    vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
#ifdef HAVE_GCC_DIAGNOSTIC_PRAGMA
#pragma GCC diagnostic pop
#endif

    if (vkGetInstanceProcAddr == NULL) {
        SDL_LogWarn(
            SDL_LOG_CATEGORY_GPU,
            "SDL_Vulkan_GetVkGetInstanceProcAddr(): %s",
            SDL_GetError());
        return;
    }

    /* Resolve every global-level function listed in SDL_gpu_vulkan_vkfuncs.h;
     * the X-macro header expands VULKAN_GLOBAL_FUNCTION once per name. */
#define VULKAN_GLOBAL_FUNCTION(name)                                                                      \
    name = (PFN_##name)vkGetInstanceProcAddr(VK_NULL_HANDLE, #name);                                      \
    if (name == NULL) {                                                                                   \
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "vkGetInstanceProcAddr(VK_NULL_HANDLE, \"" #name "\") failed"); \
        return;                                                                                           \
    }
#include "SDL_gpu_vulkan_vkfuncs.h"
}
/* Performs the instance-level half of backend initialization on `renderer`:
 * loads entry points, creates the VkInstance, resolves instance-level
 * function pointers, and selects a physical device + queue family.
 * Returns true on success. NOTE(review): if physical-device selection
 * fails, the renderer still holds a live VkInstance — the caller is
 * responsible for destroying it (confirm all callers do so on failure).
 */
static bool VULKAN_INTERNAL_PrepareVulkan(
    VulkanRenderer *renderer)
{
    VULKAN_INTERNAL_LoadEntryPoints();

    if (!VULKAN_INTERNAL_CreateInstance(renderer)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Could not create Vulkan instance");
        return false;
    }

    /* Resolve every instance-level function listed in
     * SDL_gpu_vulkan_vkfuncs.h via the X-macro header. */
#define VULKAN_INSTANCE_FUNCTION(func) \
    renderer->func = (PFN_##func)vkGetInstanceProcAddr(renderer->instance, #func);
#include "SDL_gpu_vulkan_vkfuncs.h"

    if (!VULKAN_INTERNAL_DeterminePhysicalDevice(renderer)) {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "Vulkan: Failed to determine a suitable physical device");
        return false;
    }
    return true;
}
  9576. static bool VULKAN_PrepareDriver(SDL_VideoDevice *_this)
  9577. {
  9578. // Set up dummy VulkanRenderer
  9579. VulkanRenderer *renderer;
  9580. Uint8 result;
  9581. if (_this->Vulkan_CreateSurface == NULL) {
  9582. return false;
  9583. }
  9584. if (!SDL_Vulkan_LoadLibrary(NULL)) {
  9585. return false;
  9586. }
  9587. renderer = (VulkanRenderer *)SDL_malloc(sizeof(VulkanRenderer));
  9588. SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
  9589. result = VULKAN_INTERNAL_PrepareVulkan(renderer);
  9590. if (result) {
  9591. renderer->vkDestroyInstance(renderer->instance, NULL);
  9592. }
  9593. SDL_free(renderer);
  9594. SDL_Vulkan_UnloadLibrary();
  9595. return result;
  9596. }
/* Creates the fully initialized Vulkan SDL_GPUDevice: instance + physical
 * device (via VULKAN_INTERNAL_PrepareVulkan), logical device, then all
 * renderer-side bookkeeping (locks, pools, caches, deferred-destroy lists,
 * defrag state). Returns NULL on failure of any Vulkan setup step.
 * NOTE(review): `props` is not read anywhere in this function — confirm
 * whether property handling is intentionally deferred.
 * NOTE(review): the SDL_malloc results below are used unchecked; OOM here
 * would crash rather than fail gracefully.
 */
static SDL_GPUDevice *VULKAN_CreateDevice(bool debugMode, bool preferLowPower, SDL_PropertiesID props)
{
    VulkanRenderer *renderer;
    SDL_GPUDevice *result;
    VkResult vulkanResult;
    Uint32 i;

    // Variables: Image Format Detection
    VkImageFormatProperties imageFormatProperties;

    if (!SDL_Vulkan_LoadLibrary(NULL)) {
        SDL_assert(!"This should have failed in PrepareDevice first!");
        return NULL;
    }

    // Zero-initialized so every pointer/handle starts NULL.
    renderer = (VulkanRenderer *)SDL_malloc(sizeof(VulkanRenderer));
    SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
    renderer->debugMode = debugMode;
    renderer->preferLowPower = preferLowPower;

    // Instance creation + physical device selection.
    if (!VULKAN_INTERNAL_PrepareVulkan(renderer)) {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to initialize Vulkan!");
        SDL_free(renderer);
        SDL_Vulkan_UnloadLibrary();
        return NULL;
    }

    // Log what we ended up with, for bug reports.
    SDL_LogInfo(SDL_LOG_CATEGORY_GPU, "SDL_GPU Driver: Vulkan");
    SDL_LogInfo(
        SDL_LOG_CATEGORY_GPU,
        "Vulkan Device: %s",
        renderer->physicalDeviceProperties.properties.deviceName);
    if (renderer->supports.KHR_driver_properties) {
        SDL_LogInfo(
            SDL_LOG_CATEGORY_GPU,
            "Vulkan Driver: %s %s",
            renderer->physicalDeviceDriverProperties.driverName,
            renderer->physicalDeviceDriverProperties.driverInfo);
        SDL_LogInfo(
            SDL_LOG_CATEGORY_GPU,
            "Vulkan Conformance: %u.%u.%u",
            renderer->physicalDeviceDriverProperties.conformanceVersion.major,
            renderer->physicalDeviceDriverProperties.conformanceVersion.minor,
            renderer->physicalDeviceDriverProperties.conformanceVersion.patch);
    } else {
        SDL_LogWarn(SDL_LOG_CATEGORY_GPU, "KHR_driver_properties unsupported! Bother your vendor about this!");
    }

    if (!VULKAN_INTERNAL_CreateLogicalDevice(
            renderer)) {
        SDL_LogError(SDL_LOG_CATEGORY_GPU, "Failed to create logical device");
        SDL_free(renderer);
        SDL_Vulkan_UnloadLibrary();
        return NULL;
    }

    // FIXME: just move this into this function
    result = (SDL_GPUDevice *)SDL_malloc(sizeof(SDL_GPUDevice));
    ASSIGN_DRIVER(VULKAN)

    result->driverData = (SDL_GPURenderer *)renderer;

    /*
     * Create initial swapchain array
     */
    renderer->claimedWindowCapacity = 1;
    renderer->claimedWindowCount = 0;
    renderer->claimedWindows = SDL_malloc(
        renderer->claimedWindowCapacity * sizeof(WindowData *));

    // Threading
    renderer->allocatorLock = SDL_CreateMutex();
    renderer->disposeLock = SDL_CreateMutex();
    renderer->submitLock = SDL_CreateMutex();
    renderer->acquireCommandBufferLock = SDL_CreateMutex();
    renderer->acquireUniformBufferLock = SDL_CreateMutex();
    renderer->renderPassFetchLock = SDL_CreateMutex();
    renderer->framebufferFetchLock = SDL_CreateMutex();

    /*
     * Create submitted command buffer list
     */
    renderer->submittedCommandBufferCapacity = 16;
    renderer->submittedCommandBufferCount = 0;
    renderer->submittedCommandBuffers = SDL_malloc(sizeof(VulkanCommandBuffer *) * renderer->submittedCommandBufferCapacity);

    // Memory Allocator: one sub-allocator per possible memory type index.
    renderer->memoryAllocator = (VulkanMemoryAllocator *)SDL_malloc(
        sizeof(VulkanMemoryAllocator));

    for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1) {
        renderer->memoryAllocator->subAllocators[i].memoryTypeIndex = i;
        renderer->memoryAllocator->subAllocators[i].allocations = NULL;
        renderer->memoryAllocator->subAllocators[i].allocationCount = 0;
        renderer->memoryAllocator->subAllocators[i].sortedFreeRegions = SDL_malloc(
            sizeof(VulkanMemoryFreeRegion *) * 4);
        renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCount = 0;
        renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCapacity = 4;
    }

    // Create uniform buffer pool (pre-populated with 32 buffers).
    renderer->uniformBufferPoolCount = 32;
    renderer->uniformBufferPoolCapacity = 32;
    renderer->uniformBufferPool = SDL_malloc(
        renderer->uniformBufferPoolCapacity * sizeof(VulkanUniformBuffer *));

    for (i = 0; i < renderer->uniformBufferPoolCount; i += 1) {
        renderer->uniformBufferPool[i] = VULKAN_INTERNAL_CreateUniformBuffer(
            renderer,
            UNIFORM_BUFFER_SIZE);
    }

    // Device limits
    renderer->minUBOAlignment = (Uint32)renderer->physicalDeviceProperties.properties.limits.minUniformBufferOffsetAlignment;

    // Initialize caches (command pools, render passes, framebuffers).
    renderer->commandPoolHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_CommandPoolHashFunction,
        VULKAN_INTERNAL_CommandPoolHashKeyMatch,
        VULKAN_INTERNAL_CommandPoolHashNuke,
        false);

    renderer->renderPassHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_RenderPassHashFunction,
        VULKAN_INTERNAL_RenderPassHashKeyMatch,
        VULKAN_INTERNAL_RenderPassHashNuke,
        false);

    renderer->framebufferHashTable = SDL_CreateHashTable(
        (void *)renderer,
        64,
        VULKAN_INTERNAL_FramebufferHashFunction,
        VULKAN_INTERNAL_FramebufferHashKeyMatch,
        VULKAN_INTERNAL_FramebufferHashNuke,
        false);

    // Initialize fence pool
    renderer->fencePool.lock = SDL_CreateMutex();

    renderer->fencePool.availableFenceCapacity = 4;
    renderer->fencePool.availableFenceCount = 0;
    renderer->fencePool.availableFences = SDL_malloc(
        renderer->fencePool.availableFenceCapacity * sizeof(VulkanFenceHandle *));

    // Some drivers don't support D16, so we have to fall back to D32.
    vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
        renderer->physicalDevice,
        VK_FORMAT_D16_UNORM,
        VK_IMAGE_TYPE_2D,
        VK_IMAGE_TILING_OPTIMAL,
        VK_IMAGE_ASPECT_DEPTH_BIT,
        0,
        &imageFormatProperties);

    if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED) {
        renderer->D16Format = VK_FORMAT_D32_SFLOAT;
    } else {
        renderer->D16Format = VK_FORMAT_D16_UNORM;
    }

    // Same fallback check for the combined depth/stencil variant.
    vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
        renderer->physicalDevice,
        VK_FORMAT_D16_UNORM_S8_UINT,
        VK_IMAGE_TYPE_2D,
        VK_IMAGE_TILING_OPTIMAL,
        VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
        0,
        &imageFormatProperties);

    if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED) {
        renderer->D16S8Format = VK_FORMAT_D32_SFLOAT_S8_UINT;
    } else {
        renderer->D16S8Format = VK_FORMAT_D16_UNORM_S8_UINT;
    }

    // Deferred destroy storage: objects queued here are freed later,
    // once the GPU is done with them.
    renderer->texturesToDestroyCapacity = 16;
    renderer->texturesToDestroyCount = 0;
    renderer->texturesToDestroy = (VulkanTexture **)SDL_malloc(
        sizeof(VulkanTexture *) *
        renderer->texturesToDestroyCapacity);

    renderer->buffersToDestroyCapacity = 16;
    renderer->buffersToDestroyCount = 0;
    renderer->buffersToDestroy = SDL_malloc(
        sizeof(VulkanBuffer *) *
        renderer->buffersToDestroyCapacity);

    renderer->samplersToDestroyCapacity = 16;
    renderer->samplersToDestroyCount = 0;
    renderer->samplersToDestroy = SDL_malloc(
        sizeof(VulkanSampler *) *
        renderer->samplersToDestroyCapacity);

    renderer->graphicsPipelinesToDestroyCapacity = 16;
    renderer->graphicsPipelinesToDestroyCount = 0;
    renderer->graphicsPipelinesToDestroy = SDL_malloc(
        sizeof(VulkanGraphicsPipeline *) *
        renderer->graphicsPipelinesToDestroyCapacity);

    renderer->computePipelinesToDestroyCapacity = 16;
    renderer->computePipelinesToDestroyCount = 0;
    renderer->computePipelinesToDestroy = SDL_malloc(
        sizeof(VulkanComputePipeline *) *
        renderer->computePipelinesToDestroyCapacity);

    renderer->shadersToDestroyCapacity = 16;
    renderer->shadersToDestroyCount = 0;
    renderer->shadersToDestroy = SDL_malloc(
        sizeof(VulkanShader *) *
        renderer->shadersToDestroyCapacity);

    renderer->framebuffersToDestroyCapacity = 16;
    renderer->framebuffersToDestroyCount = 0;
    renderer->framebuffersToDestroy = SDL_malloc(
        sizeof(VulkanFramebuffer *) *
        renderer->framebuffersToDestroyCapacity);

    // Defrag state
    renderer->defragInProgress = 0;
    renderer->allocationsToDefragCount = 0;
    renderer->allocationsToDefragCapacity = 4;
    renderer->allocationsToDefrag = SDL_malloc(
        renderer->allocationsToDefragCapacity * sizeof(VulkanMemoryAllocation *));

    return result;
}
/* Bootstrap descriptor for the Vulkan backend: driver name, driver enum,
 * supported shader format (SPIR-V), and the prepare/create entry points
 * used by the SDL_GPU driver selection machinery. */
SDL_GPUBootstrap VulkanDriver = {
    "Vulkan",
    SDL_GPU_DRIVER_VULKAN,
    SDL_GPU_SHADERFORMAT_SPIRV,
    VULKAN_PrepareDriver,
    VULKAN_CreateDevice
};
  9801. #endif // SDL_GPU_VULKAN