sokol_gfx.h 1.2 MB

(file contents not shown — 14,308 lines)
430914310143111431214313143141431514316143171431814319143201432114322143231432414325143261432714328143291433014331143321433314334143351433614337143381433914340143411434214343143441434514346143471434814349143501435114352143531435414355143561435714358143591436014361143621436314364143651436614367143681436914370143711437214373143741437514376143771437814379143801438114382143831438414385143861438714388143891439014391143921439314394143951439614397143981439914400144011440214403144041440514406144071440814409144101441114412144131441414415144161441714418144191442014421144221442314424144251442614427144281442914430144311443214433144341443514436144371443814439144401444114442144431444414445144461444714448144491445014451144521445314454144551445614457144581445914460144611446214463144641446514466144671446814469144701447114472144731447414475144761447714478144791448014481144821448314484144851448614487144881448914490144911449214493144941449514496144971449814499145001450114502145031450414505145061450714508145091451014511145121451314514145151451614517145181451914520145211452214523145241452514526145271452814529145301453114532145331453414535145361453714538145391454014541145421454314544145451454614547145481454914550145511455214553145541455514556145571455814559145601456114562145631456414565145661456714568145691457014571145721457314574145751457614577145781457914580145811458214583145841458514586145871458814589145901459114592145931459414595145961459714598145991460014601146021460314604146051460614607146081460914610146111461214613146141461514616146171461814619146201462114622146231462414625146261462714628146291463014631146321463314634146351463614637146381463914640146411464214643146441464514646146471464814649146501465114652146531465414655146561465714658146591466014661146621466314664146651466614667146681466914670146711467214673146741467514676146771467814679146801468114682146831468414685146861468714688146891469014691146921469314694146951469614697146981469914700147011470214703147041470514706147071470814709147101471114712147131471414715147161471714718147191472014721147221472314724147251472614727147281472914730147311473214733147341473514736147371473814739147401474114742147431474414745147461474714748147491475014751147521475314754147551475614757147581475914760147611476214763147641476514766147671476814769147701477114772147731477414775147761477714778147791478014781147821478314784147851478614787147881478914790147911479214793147941479514796147971479814799148001480114802148031480414805148061480714808148091481014811148121481314814148151481614817148181481914820148211482214823148241482514826148271482814829148301483114832148331483414835148361483714838148391484014841148421484314844148451484614847148481484914850148511485214853148541485514856148571485814859148601486114862148631486414865148661486714868148691487014871148721487314874148751487614877148781487914880148811488214883148841488514886148871488814889148901489114892148931489414895148961489714898148991490014901149021490314904149051490614907149081490914910149111491214913149141491514916149171491814919149201492114922149231492414925149261492714928149291493014931149321493314934149351493614937149381493914940149411494214943149441494514946149471494814949149501495114952149531495414955149561495714958149591496014961149621496314964149651496614967149681496914970149711497214973149741497514976149771497814979149801498114982149831498414985149861498714988149891499014991149921499314994149951499614997149981499915000150011500215003150041500515006150071500815009150101501115012150131501415015150161501715018150191
502015021150221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211542215423154241542515426154271542815429154301543115432154331543415435154361543715438154391544015441154421544315444154451544615447154481544915450154511545215453154541545515456154571545815459154601546115462154631546415465154661546715468154691547015471154721547315474154751547615477154781547915480154811548215483154841548515486154871548815489154901549115492154931549415495154961549715498154991550015501155021550315504155051550615507155081550915510155111551215513155141551515516155171551815519155201552115522155231552415525155261552715528155291553015531155321553315534155351553615537155381553915540155411554215543155441554515546155471554815549155501555115552155531555415555155561555715558155591556015561155621556315564155651556615567155681556915570155711557215573155741557515576155771557815579155801558115582155831558415585155861558715588155891559015591155921559315594155951559615597155981559915600156011560215603156041560515606156071560815609156101561115612156131561415615156161561715618156191562015621156221562315624156251562615627156281562915630156311563215633156341563515636156371563815639156401564115642156431564415645156461564715648156491565015651156521565315654156551565615657156581565915660156611566215663156641566515666156671566815669156701567115672156731567415675156761567715678156791568015681156821568315684156851568615687156881568915690156911569215693156941569515696156971569815699157001570115702157031570415705157061570715708157091571015711157121571315714157151571615717157181571915720157211572215723157241572515726157271572815729157301
573115732157331573415735157361573715738157391574015741157421574315744157451574615747157481574915750157511575215753157541575515756157571575815759157601576115762157631576415765157661576715768157691577015771157721577315774157751577615777157781577915780157811578215783157841578515786157871578815789157901579115792157931579415795157961579715798157991580015801158021580315804158051580615807158081580915810158111581215813158141581515816158171581815819158201582115822158231582415825158261582715828158291583015831158321583315834158351583615837158381583915840158411584215843158441584515846158471584815849158501585115852158531585415855158561585715858158591586015861158621586315864158651586615867158681586915870158711587215873158741587515876158771587815879158801588115882158831588415885158861588715888158891589015891158921589315894158951589615897158981589915900159011590215903159041590515906159071590815909159101591115912159131591415915159161591715918159191592015921159221592315924159251592615927159281592915930159311593215933159341593515936159371593815939159401594115942159431594415945159461594715948159491595015951159521595315954159551595615957159581595915960159611596215963159641596515966159671596815969159701597115972159731597415975159761597715978159791598015981159821598315984159851598615987159881598915990159911599215993159941599515996159971599815999160001600116002160031600416005160061600716008160091601016011160121601316014160151601616017160181601916020160211602216023160241602516026160271602816029160301603116032160331603416035160361603716038160391604016041160421604316044160451604616047160481604916050160511605216053160541605516056160571605816059160601606116062160631606416065160661606716068160691607016071160721607316074160751607616077160781607916080160811608216083160841608516086160871608816089160901609116092160931609416095160961609716098160991610016101161021610316104161051610616107161081610916110161111611216113161141611516116161171611816119161201612116122161231612416125161261612716128161291613016131161321613316134161351613616137161381613916140161411614216143161441614516146161471614816149161501615116152161531615416155161561615716158161591616016161161621616316164161651616616167161681616916170161711617216173161741617516176161771617816179161801618116182161831618416185161861618716188161891619016191161921619316194161951619616197161981619916200162011620216203162041620516206162071620816209162101621116212162131621416215162161621716218162191622016221162221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411
644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211662216623166241662516626166271662816629166301663116632166331663416635166361663716638166391664016641166421664316644166451664616647166481664916650166511665216653166541665516656166571665816659166601666116662166631666416665166661666716668166691667016671166721667316674166751667616677166781667916680166811668216683166841668516686166871668816689166901669116692166931669416695166961669716698166991670016701167021670316704167051670616707167081670916710167111671216713167141671516716167171671816719167201672116722167231672416725167261672716728167291673016731167321673316734167351673616737167381673916740167411674216743167441674516746167471674816749167501675116752167531675416755167561675716758167591676016761167621676316764167651676616767167681676916770167711677216773167741677516776167771677816779167801678116782167831678416785167861678716788167891679016791167921679316794167951679616797167981679916800168011680216803168041680516806168071680816809168101681116812168131681416815168161681716818168191682016821168221682316824168251682616827168281682916830168311683216833168341683516836168371683816839168401684116842168431684416845168461684716848168491685016851168521685316854168551685616857168581685916860168611686216863168641686516866168671686816869168701687116872168731687416875168761687716878168791688016881168821688316884168851688616887168881688916890168911689216893168941689516896168971689816899169001690116902169031690416905169061690716908169091691016911169121691316914169151691616917169181691916920169211692216923169241692516926169271692816929169301693116932169331693416935169361693716938169391694016941169421694316944169451694616947169481694916950169511695216953169541695516956169571695816959169601696116962169631696416965169661696716968169691697016971169721697316974169751697616977169781697916980169811698216983169841698516986169871698816989169901699116992169931699416995169961699716998169991700017001170021700317004170051700617007170081700917010170111701217013170141701517016170171701817019170201702117022170231702417025170261702717028170291703017031170321703317034170351703617037170381703917040170411704217043170441704517046170471704817049170501705117052170531705417055170561705717058170591706017061170621706317064170651706617067170681706917070170711707217073170741707517076170771707817079170801708117082170831708417085170861708717088170891709017091170921709317094170951709617097170981709917100171011710217103171041710517106171071710817109171101711117112171131711417115171161711717118171191712017121171221712317124171251712617127171281712917130171311713217133171341713517136171371713817139171401714117142171431714417145171461714717148171491715017151171521
715317154171551715617157171581715917160171611716217163171641716517166171671716817169171701717117172171731717417175171761717717178171791718017181171821718317184171851718617187171881718917190171911719217193171941719517196171971719817199172001720117202172031720417205172061720717208172091721017211172121721317214172151721617217172181721917220172211722217223172241722517226172271722817229172301723117232172331723417235172361723717238172391724017241172421724317244172451724617247172481724917250172511725217253172541725517256172571725817259172601726117262172631726417265172661726717268172691727017271172721727317274172751727617277172781727917280172811728217283172841728517286172871728817289172901729117292172931729417295172961729717298172991730017301173021730317304173051730617307173081730917310173111731217313173141731517316173171731817319173201732117322173231732417325173261732717328173291733017331173321733317334173351733617337173381733917340173411734217343173441734517346173471734817349173501735117352173531735417355173561735717358173591736017361173621736317364173651736617367173681736917370173711737217373173741737517376173771737817379173801738117382173831738417385173861738717388173891739017391173921739317394173951739617397173981739917400174011740217403174041740517406174071740817409174101741117412174131741417415174161741717418174191742017421174221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211782217823178241782517826178271782817829178301783117832178331783417835178361783717838178391784017841178421784317844178451784617847178481784917850178511785217853178541785517856178571785817859178601786117862178631
786417865178661786717868178691787017871178721787317874178751787617877178781787917880178811788217883178841788517886178871788817889178901789117892178931789417895178961789717898178991790017901179021790317904179051790617907179081790917910179111791217913179141791517916179171791817919179201792117922179231792417925179261792717928179291793017931179321793317934179351793617937179381793917940179411794217943179441794517946179471794817949179501795117952179531795417955179561795717958179591796017961179621796317964179651796617967179681796917970179711797217973179741797517976179771797817979179801798117982179831798417985179861798717988179891799017991179921799317994179951799617997179981799918000180011800218003180041800518006180071800818009180101801118012180131801418015180161801718018180191802018021180221802318024180251802618027180281802918030180311803218033180341803518036180371803818039180401804118042180431804418045180461804718048180491805018051180521805318054180551805618057180581805918060180611806218063180641806518066180671806818069180701807118072180731807418075180761807718078180791808018081180821808318084180851808618087180881808918090180911809218093180941809518096180971809818099181001810118102181031810418105181061810718108181091811018111181121811318114181151811618117181181811918120181211812218123181241812518126181271812818129181301813118132181331813418135181361813718138181391814018141181421814318144181451814618147181481814918150181511815218153181541815518156181571815818159181601816118162181631816418165181661816718168181691817018171181721817318174181751817618177181781817918180181811818218183181841818518186181871818818189181901819118192181931819418195181961819718198181991820018201182021820318204182051820618207182081820918210182111821218213182141821518216182171821818219182201822118222182231822418225182261822718228182291823018231182321823318234182351823618237182381823918240182411824218243182441824518246182471824818249182501825118252182531825418255182561825718258182591826018261182621826318264182651826618267182681826918270182711827218273182741827518276182771827818279182801828118282182831828418285182861828718288182891829018291182921829318294182951829618297182981829918300183011830218303183041830518306183071830818309183101831118312183131831418315183161831718318183191832018321183221832318324183251832618327183281832918330183311833218333183341833518336183371833818339183401834118342183431834418345183461834718348183491835018351183521835318354183551835618357183581835918360183611836218363183641836518366183671836818369183701837118372183731837418375183761837718378183791838018381183821838318384183851838618387183881838918390183911839218393183941839518396183971839818399184001840118402184031840418405184061840718408184091841018411184121841318414184151841618417184181841918420184211842218423184241842518426184271842818429184301843118432184331843418435184361843718438184391844018441184421844318444184451844618447184481844918450184511845218453184541845518456184571845818459184601846118462184631846418465184661846718468184691847018471184721847318474184751847618477184781847918480184811848218483184841848518486184871848818489184901849118492184931849418495184961849718498184991850018501185021850318504185051850618507185081850918510185111851218513185141851518516185171851818519185201852118522185231852418525185261852718528185291853018531185321853318534185351853618537185381853918540185411854218543185441854518546185471854818549185501855118552185531855418555185561855718558185591856018561185621856318564185651856618567185681856918570185711857218573185741
857518576185771857818579185801858118582185831858418585185861858718588185891859018591185921859318594185951859618597185981859918600186011860218603186041860518606186071860818609186101861118612186131861418615186161861718618186191862018621186221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211902219023190241902519026190271902819029190301903119032190331903419035190361903719038190391904019041190421904319044190451904619047190481904919050190511905219053190541905519056190571905819059190601906119062190631906419065190661906719068190691907019071190721907319074190751907619077190781907919080190811908219083190841908519086190871908819089190901909119092190931909419095190961909719098190991910019101191021910319104191051910619107191081910919110191111911219113191141911519116191171911819119191201912119122191231912419125191261912719128191291913019131191321913319134191351913619137191381913919140191411914219143191441914519146191471914819149191501915119152191531915419155191561915719158191591916019161191621916319164191651916619167191681916919170191711917219173191741917519176191771917819179191801918119182191831918419185191861918719188191891919019191191921919319194191951919619197191981919919200192011920219203192041920519206192071920819209192101921119212192131921419215192161921719218192191922019221192221922319224192251922619227192281922919230192311923219233192341923519236192371923819239192401924119242192431924419245192461924719248192491925019251192521925319254192551925619257192581925919260192611926219263192641926519266192671926819269192701927119272192731927419275192761927719278192791928019281192821928319284192851
928619287192881928919290192911929219293192941929519296192971929819299193001930119302193031930419305193061930719308193091931019311193121931319314193151931619317193181931919320193211932219323193241932519326193271932819329193301933119332193331933419335193361933719338193391934019341193421934319344193451934619347193481934919350193511935219353193541935519356193571935819359193601936119362193631936419365193661936719368193691937019371193721937319374193751937619377193781937919380193811938219383193841938519386193871938819389193901939119392193931939419395193961939719398193991940019401194021940319404194051940619407194081940919410194111941219413194141941519416194171941819419194201942119422194231942419425194261942719428194291943019431194321943319434194351943619437194381943919440194411944219443194441944519446194471944819449194501945119452194531945419455194561945719458194591946019461194621946319464194651946619467194681946919470194711947219473194741947519476194771947819479194801948119482194831948419485194861948719488194891949019491194921949319494194951949619497194981949919500195011950219503195041950519506195071950819509195101951119512195131951419515195161951719518195191952019521195221952319524195251952619527195281952919530195311953219533195341953519536195371953819539195401954119542195431954419545195461954719548195491955019551195521955319554195551955619557195581955919560195611956219563195641956519566195671956819569195701957119572195731957419575195761957719578195791958019581195821958319584195851958619587195881958919590195911959219593195941959519596195971959819599196001960119602196031960419605196061960719608196091961019611196121961319614196151961619617196181961919620196211962219623196241962519626196271962819629196301963119632196331963419635196361963719638196391964019641196421964319644196451964619647196481964919650196511965219653196541965519656196571965819659196601966119662196631966419665196661966719668196691967019671196721967319674196751967619677196781967919680196811968219683196841968519686196871968819689196901969119692196931969419695196961969719698196991970019701197021970319704197051970619707197081970919710197111971219713197141971519716197171971819719197201972119722197231972419725197261972719728197291973019731197321973319734197351973619737197381973919740197411974219743197441974519746197471974819749197501975119752197531975419755197561975719758197591976019761197621976319764197651976619767197681976919770197711977219773197741977519776197771977819779197801978119782197831978419785197861978719788197891979019791197921979319794197951979619797197981979919800198011980219803198041980519806198071980819809198101981119812198131981419815198161981719818198191982019821198221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961
999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212022220223202242022520226202272022820229202302023120232202332023420235202362023720238202392024020241202422024320244202452024620247202482024920250202512025220253202542025520256202572025820259202602026120262202632026420265202662026720268202692027020271202722027320274202752027620277202782027920280202812028220283202842028520286202872028820289202902029120292202932029420295202962029720298202992030020301203022030320304203052030620307203082030920310203112031220313203142031520316203172031820319203202032120322203232032420325203262032720328203292033020331203322033320334203352033620337203382033920340203412034220343203442034520346203472034820349203502035120352203532035420355203562035720358203592036020361203622036320364203652036620367203682036920370203712037220373203742037520376203772037820379203802038120382203832038420385203862038720388203892039020391203922039320394203952039620397203982039920400204012040220403204042040520406204072040820409204102041120412204132041420415204162041720418204192042020421204222042320424204252042620427204282042920430204312043220433204342043520436204372043820439204402044120442204432044420445204462044720448204492045020451204522045320454204552045620457204582045920460204612046220463204642046520466204672046820469204702047120472204732047420475204762047720478204792048020481204822048320484204852048620487204882048920490204912049220493204942049520496204972049820499205002050120502205032050420505205062050720508205092051020511205122051320514205152051620517205182051920520205212052220523205242052520526205272052820529205302053120532205332053420535205362053720538205392054020541205422054320544205452054620547205482054920550205512055220553205542055520556205572055820559205602056120562205632056420565205662056720568205692057020571205722057320574205752057620577205782057920580205812058220583205842058520586205872058820589205902059120592205932059420595205962059720598205992060020601206022060320604206052060620607206082060920610206112061220613206142061520616206172061820619206202062120622206232062420625206262062720628206292063020631206322063320634206352063620637206382063920640206412064220643206442064520646206472064820649206502065120652206532065420655206562065720658206592066020661206622066320664206652066620667206682066920670206712067220673206742067520676206772067820679206802068120682206832068420685206862068720688206892069020691206922069320694206952069620697206982069920700207012070220703207042070520706207072
070820709207102071120712207132071420715207162071720718207192072020721207222072320724207252072620727207282072920730207312073220733207342073520736207372073820739207402074120742207432074420745207462074720748207492075020751207522075320754207552075620757207582075920760207612076220763207642076520766207672076820769207702077120772207732077420775207762077720778207792078020781207822078320784207852078620787207882078920790207912079220793207942079520796207972079820799208002080120802208032080420805208062080720808208092081020811208122081320814208152081620817208182081920820208212082220823208242082520826208272082820829208302083120832208332083420835208362083720838208392084020841208422084320844208452084620847208482084920850208512085220853208542085520856208572085820859208602086120862208632086420865208662086720868208692087020871208722087320874208752087620877208782087920880208812088220883208842088520886208872088820889208902089120892208932089420895208962089720898208992090020901209022090320904209052090620907209082090920910209112091220913209142091520916209172091820919209202092120922209232092420925209262092720928209292093020931209322093320934209352093620937209382093920940209412094220943209442094520946209472094820949209502095120952209532095420955209562095720958209592096020961209622096320964209652096620967209682096920970209712097220973209742097520976209772097820979209802098120982209832098420985209862098720988209892099020991209922099320994209952099620997209982099921000210012100221003210042100521006210072100821009210102101121012210132101421015210162101721018210192102021021210222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182
141921420214212142221423214242142521426214272142821429214302143121432214332143421435214362143721438214392144021441214422144321444214452144621447214482144921450214512145221453214542145521456214572145821459214602146121462214632146421465214662146721468214692147021471214722147321474214752147621477214782147921480214812148221483214842148521486214872148821489214902149121492214932149421495214962149721498214992150021501215022150321504215052150621507215082150921510215112151221513215142151521516215172151821519215202152121522215232152421525215262152721528215292153021531215322153321534215352153621537215382153921540215412154221543215442154521546215472154821549215502155121552215532155421555215562155721558215592156021561215622156321564215652156621567215682156921570215712157221573215742157521576215772157821579215802158121582215832158421585215862158721588215892159021591215922159321594215952159621597215982159921600216012160221603216042160521606216072160821609216102161121612216132161421615216162161721618216192162021621216222162321624216252162621627216282162921630216312163221633216342163521636216372163821639216402164121642216432164421645216462164721648216492165021651216522165321654216552165621657216582165921660216612166221663216642166521666216672166821669216702167121672216732167421675216762167721678216792168021681216822168321684216852168621687216882168921690216912169221693216942169521696216972169821699217002170121702217032170421705217062170721708217092171021711217122171321714217152171621717217182171921720217212172221723217242172521726217272172821729217302173121732217332173421735217362173721738217392174021741217422174321744217452174621747217482174921750217512175221753217542175521756217572175821759217602176121762217632176421765217662176721768217692177021771217722177321774217752177621777217782177921780217812178221783217842178521786217872178821789217902179121792217932179421795217962179721798217992180021801218022180321804218052180621807218082180921810218112181221813218142181521816218172181821819218202182121822218232182421825218262182721828218292183021831218322183321834218352183621837218382183921840218412184221843218442184521846218472184821849218502185121852218532185421855218562185721858218592186021861218622186321864218652186621867218682186921870218712187221873218742187521876218772187821879218802188121882218832188421885218862188721888218892189021891218922189321894218952189621897218982189921900219012190221903219042190521906219072190821909219102191121912219132191421915219162191721918219192192021921219222192321924219252192621927219282192921930219312193221933219342193521936219372193821939219402194121942219432194421945219462194721948219492195021951219522195321954219552195621957219582195921960219612196221963219642196521966219672196821969219702197121972219732197421975219762197721978219792198021981219822198321984219852198621987219882198921990219912199221993219942199521996219972199821999220002200122002220032200422005220062200722008220092201022011220122201322014220152201622017220182201922020220212202222023220242202522026220272202822029220302203122032220332203422035220362203722038220392204022041220422204322044220452204622047220482204922050220512205222053220542205522056220572205822059220602206122062220632206422065220662206722068220692207022071220722207322074220752207622077220782207922080220812208222083220842208522086220872208822089220902209122092220932209422095220962209722098220992210022101221022210322104221052210622107221082210922110221112211222113221142211522116221172211822119221202212122122221232212422125221262212722128221292
213022131221322213322134221352213622137221382213922140221412214222143221442214522146221472214822149221502215122152221532215422155221562215722158221592216022161221622216322164221652216622167221682216922170221712217222173221742217522176221772217822179221802218122182221832218422185221862218722188221892219022191221922219322194221952219622197221982219922200222012220222203222042220522206222072220822209222102221122212222132221422215222162221722218222192222022221222222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212262222623226242262522626226272262822629226302263122632226332263422635226362263722638226392264022641226422264322644226452264622647226482264922650226512265222653226542265522656226572265822659226602266122662226632266422665226662266722668226692267022671226722267322674226752267622677226782267922680226812268222683226842268522686226872268822689226902269122692226932269422695226962269722698226992270022701227022270322704227052270622707227082270922710227112271222713227142271522716227172271822719227202272122722227232272422725227262272722728227292273022731227322273322734227352273622737227382273922740227412274222743227442274522746227472274822749227502275122752227532275422755227562275722758227592276022761227622276322764227652276622767227682276922770227712277222773227742277522776227772277822779227802278122782227832278422785227862278722788227892279022791227922279322794227952279622797227982279922800228012280222803228042280522806228072280822809228102281122812228132281422815228162281722818228192282022821228222282322824228252282622827228282282922830228312283222833228342283522836228372283822839228402
284122842228432284422845228462284722848228492285022851228522285322854228552285622857228582285922860228612286222863228642286522866228672286822869228702287122872228732287422875228762287722878228792288022881228822288322884228852288622887228882288922890228912289222893228942289522896228972289822899229002290122902229032290422905229062290722908229092291022911229122291322914229152291622917229182291922920229212292222923229242292522926229272292822929229302293122932229332293422935229362293722938229392294022941229422294322944229452294622947229482294922950229512295222953229542295522956229572295822959229602296122962229632296422965229662296722968229692297022971229722297322974229752297622977229782297922980229812298222983229842298522986229872298822989229902299122992229932299422995229962299722998229992300023001230022300323004230052300623007230082300923010230112301223013230142301523016230172301823019230202302123022230232302423025230262302723028230292303023031230322303323034230352303623037230382303923040230412304223043230442304523046230472304823049230502305123052230532305423055230562305723058230592306023061230622306323064230652306623067230682306923070230712307223073230742307523076230772307823079230802308123082230832308423085230862308723088230892309023091230922309323094230952309623097230982309923100231012310223103231042310523106231072310823109231102311123112231132311423115231162311723118231192312023121231222312323124231252312623127231282312923130231312313223133231342313523136231372313823139231402314123142231432314423145231462314723148231492315023151231522315323154231552315623157231582315923160231612316223163231642316523166231672316823169231702317123172231732317423175231762317723178231792318023181231822318323184231852318623187231882318923190231912319223193231942319523196231972319823199232002320123202232032320423205232062320723208232092321023211232122321323214232152321623217232182321923220232212322223223232242322523226232272322823229232302323123232232332323423235232362323723238232392324023241232422324323244232452324623247232482324923250232512325223253232542325523256232572325823259232602326123262232632326423265232662326723268232692327023271232722327323274232752327623277232782327923280232812328223283232842328523286232872328823289232902329123292232932329423295232962329723298232992330023301233022330323304233052330623307233082330923310233112331223313233142331523316233172331823319233202332123322233232332423325233262332723328233292333023331233322333323334233352333623337233382333923340233412334223343233442334523346233472334823349233502335123352233532335423355233562335723358233592336023361233622336323364233652336623367233682336923370233712337223373233742337523376233772337823379233802338123382233832338423385233862338723388233892339023391233922339323394233952339623397233982339923400234012340223403234042340523406234072340823409234102341123412234132341423415234162341723418234192342023421234222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512
355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212382223823238242382523826238272382823829238302383123832238332383423835238362383723838238392384023841238422384323844238452384623847238482384923850238512385223853238542385523856238572385823859238602386123862238632386423865238662386723868238692387023871238722387323874238752387623877238782387923880238812388223883238842388523886238872388823889238902389123892238932389423895238962389723898238992390023901239022390323904239052390623907239082390923910239112391223913239142391523916239172391823919239202392123922239232392423925239262392723928239292393023931239322393323934239352393623937239382393923940239412394223943239442394523946239472394823949239502395123952239532395423955239562395723958239592396023961239622396323964239652396623967239682396923970239712397223973239742397523976239772397823979239802398123982239832398423985239862398723988239892399023991239922399323994239952399623997239982399924000240012400224003240042400524006240072400824009240102401124012240132401424015240162401724018240192402024021240222402324024240252402624027240282402924030240312403224033240342403524036240372403824039240402404124042240432404424045240462404724048240492405024051240522405324054240552405624057240582405924060240612406224063240642406524066240672406824069240702407124072240732407424075240762407724078240792408024081240822408324084240852408624087240882408924090240912409224093240942409524096240972409824099241002410124102241032410424105241062410724108241092411024111241122411324114241152411624117241182411924120241212412224123241242412524126241272412824129241302413124132241332413424135241362413724138241392414024141241422414324144241452414624147241482414924150241512415224153241542415524156241572415824159241602416124162241632416424165241662416724168241692417024171241722417324174241752417624177241782417924180241812418224183241842418524186241872418824189241902419124192241932419424195241962419724198241992420024201242022420324204242052420624207242082420924210242112421224213242142421524216242172421824219242202422124222242232422424225242262422724228242292423024231242322423324234242352423624237242382423924240242412424224243242442424524246242472424824249242502425124252242532425424255242562425724258242592426024261242622
#if defined(SOKOL_IMPL) && !defined(SOKOL_GFX_IMPL)
#define SOKOL_GFX_IMPL
#endif
#ifndef SOKOL_GFX_INCLUDED
/*
sokol_gfx.h -- simple 3D API wrapper

Project URL: https://github.com/floooh/sokol
Example code: https://github.com/floooh/sokol-samples

Do this:
    #define SOKOL_IMPL or
    #define SOKOL_GFX_IMPL
before you include this file in *one* C or C++ file to create the
implementation.

In the same place define one of the following to select the rendering
backend:
    #define SOKOL_GLCORE
    #define SOKOL_GLES3
    #define SOKOL_D3D11
    #define SOKOL_METAL
    #define SOKOL_WGPU
    #define SOKOL_VULKAN
    #define SOKOL_DUMMY_BACKEND

I.e. for the desktop GL it should look like this:

    #include ...
    #include ...
    #define SOKOL_IMPL
    #define SOKOL_GLCORE
    #include "sokol_gfx.h"

The dummy backend replaces the platform-specific backend code with empty
stub functions. This is useful for writing tests that need to run on the
command line.
Optionally provide the following defines with your own implementations:

    SOKOL_ASSERT(c)             - your own assert macro (default: assert(c))
    SOKOL_UNREACHABLE()         - a guard macro for unreachable code (default: assert(false))
    SOKOL_GFX_API_DECL          - public function declaration prefix (default: extern)
    SOKOL_API_DECL              - same as SOKOL_GFX_API_DECL
    SOKOL_API_IMPL              - public function implementation prefix (default: -)
    SOKOL_TRACE_HOOKS           - enable trace hook callbacks (search below for TRACE HOOKS)
    SOKOL_EXTERNAL_GL_LOADER    - indicates that you're using your own GL loader, in this case
                                  sokol_gfx.h will not include any platform GL headers and disable
                                  the integrated Win32 GL loader

If sokol_gfx.h is compiled as a DLL, define the following before
including the declaration or implementation:

    SOKOL_DLL

On Windows, SOKOL_DLL will define SOKOL_GFX_API_DECL as __declspec(dllexport)
or __declspec(dllimport) as needed.

Optionally define the following to force debug checks and validations
even in release mode:

    SOKOL_DEBUG - by default this is defined if NDEBUG is not defined

Link with the following system libraries (note that sokol_app.h has
additional linker requirements):

    - on macOS/iOS with Metal: Metal
    - on macOS with GL: OpenGL
    - on iOS with GL: OpenGLES
    - on Linux with EGL: GL or GLESv2
    - on Linux with GLX: GL
    - on Android: GLESv3, log, android
    - on Windows with the MSVC or Clang toolchains: no action needed, libs are defined in-source via pragma-comment-lib
    - on Windows with MINGW/MSYS2 gcc: compile with '-mwin32' so that _WIN32 is defined
        - with the D3D11 backend: -ld3d11

On macOS and iOS, the implementation must be compiled as Objective-C.

On Emscripten:

    - for WebGL2: add the linker option `-s USE_WEBGL2=1`
    - for WebGPU: compile and link with `--use-port=emdawnwebgpu`
      (for more exotic situations, read: https://dawn.googlesource.com/dawn/+/refs/heads/main/src/emdawnwebgpu/pkg/README.md)
sokol_gfx DOES NOT:
===================
- create a window, swapchain or the 3D-API context/device, you must do this
  before sokol_gfx is initialized, and pass any required information
  (like 3D device pointers) to the sokol_gfx initialization call
- present the rendered frame, how this is done exactly usually depends
  on how the window and 3D-API context/device was created
- provide a unified shader language, instead 3D-API-specific shader
  source-code or shader-bytecode must be provided (for the "official"
  offline shader cross-compiler / code-generator, see here:
  https://github.com/floooh/sokol-tools/blob/master/docs/sokol-shdc.md)
STEP BY STEP
============
--- to initialize sokol_gfx, after creating a window and a 3D-API
    context/device, call:

        sg_setup(const sg_desc*)

    Depending on the selected 3D backend, sokol-gfx requires some
    information about its runtime environment, like a GPU device pointer,
    default swapchain pixel formats and so on. If you are using sokol_app.h
    for the window system glue, you can use a helper function provided in
    the sokol_glue.h header:

        #include "sokol_gfx.h"
        #include "sokol_app.h"
        #include "sokol_glue.h"
        //...
        sg_setup(&(sg_desc){
            .environment = sglue_environment(),
        });

    To get any logging output for errors and from the validation layer, you
    need to provide a logging callback. The easiest way is through sokol_log.h:

        #include "sokol_log.h"
        //...
        sg_setup(&(sg_desc){
            //...
            .logger.func = slog_func,
        });
--- create resource objects (buffers, images, views, samplers, shaders
    and pipeline objects):

        sg_buffer sg_make_buffer(const sg_buffer_desc*)
        sg_image sg_make_image(const sg_image_desc*)
        sg_view sg_make_view(const sg_view_desc*)
        sg_sampler sg_make_sampler(const sg_sampler_desc*)
        sg_shader sg_make_shader(const sg_shader_desc*)
        sg_pipeline sg_make_pipeline(const sg_pipeline_desc*)

--- start a render- or compute-pass:

        sg_begin_pass(const sg_pass* pass);

    Typically, render passes render into an externally provided swapchain which
    presents the rendering result on the display. Such a 'swapchain pass'
    is started like this:

        sg_begin_pass(&(sg_pass){ .action = { ... }, .swapchain = sglue_swapchain() })

    ...where .action is an sg_pass_action struct containing actions to be performed
    at the start and end of a render pass (such as clearing the render surfaces to
    a specific color), and .swapchain is an sg_swapchain struct with all the required
    information to render into the swapchain's surfaces.
    To start an 'offscreen render pass' into sokol-gfx image objects, populate
    the sg_pass.attachments nested struct with attachment view objects
    (1..4 color-attachment-views to render into, a depth-stencil-attachment-view
    to provide the depth-stencil-buffer, and optionally 1..4 resolve-attachment-views
    for an MSAA-resolve operation):

        sg_begin_pass(&(sg_pass){
            .action = { ... },
            .attachments = {
                .colors[0] = color_attachment_view,
                .resolves[0] = optional_resolve_attachment_view,
                .depth_stencil = depth_stencil_attachment_view,
            },
        });

    To start a compute-pass, just set the .compute item to true:

        sg_begin_pass(&(sg_pass){ .compute = true });
--- set the pipeline state for the next draw call with:

        sg_apply_pipeline(sg_pipeline pip)

--- fill an sg_bindings struct with the resource bindings for the next
    draw- or dispatch-call (0..N vertex buffers, 0 or 1 index buffer, 0..N views,
    0..N samplers), and call:

        sg_apply_bindings(const sg_bindings* bindings)

    ...to update the resource bindings. Note that in a compute pass, no vertex-
    or index-buffer bindings can be used, and in render passes, no storage-image bindings
    are allowed. Those restrictions will be checked by the sokol-gfx validation layer.

--- optionally update shader uniform data with:

        sg_apply_uniforms(int ub_slot, const sg_range* data)

    Read the section 'UNIFORM DATA LAYOUT' to learn about the expected memory layout
    of the uniform data passed into sg_apply_uniforms().
--- kick off a draw call with:

        sg_draw(int base_element, int num_elements, int num_instances)

    The sg_draw() function unifies all the different ways to render primitives
    in a single call (indexed vs non-indexed rendering, and instanced vs non-instanced
    rendering). In case of indexed rendering, base_element and num_elements specify
    indices in the currently bound index buffer. In case of non-indexed rendering,
    base_element and num_elements specify vertices in the currently bound
    vertex-buffer(s). To perform instanced rendering, the rendering pipeline
    must be setup for instancing (see sg_pipeline_desc below), a separate vertex buffer
    containing per-instance data must be bound, and the num_instances parameter
    must be > 1.
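
    For instance, an indexed, instanced draw might look like this (a minimal
    sketch, assuming a pipeline 'pip' and an sg_bindings struct 'bind' have
    been set up beforehand):

        // sketch: render 100 instances of a mesh with 36 indices,
        // starting at index 0 in the bound index buffer
        sg_apply_pipeline(pip);
        sg_apply_bindings(&bind);
        sg_draw(0, 36, 100);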
    Alternatively, call:

        sg_draw_ex(...)

    ...to provide a base-vertex and/or base-instance, which allows rendering
    from different sections of a vertex buffer without rebinding the
    vertex buffer with a different offset. Note that sg_draw_ex()
    only has limited portability on OpenGL, check the sg_limits struct
    members .draw_base_vertex and .draw_base_instance for runtime support.
    Those are generally true on non-GL-backends, and on GL the feature
    flags are set according to the GL version:

    - on GL, base_instance != 0 is only supported since GL 4.2
    - on GLES3.x, base_instance != 0 is not supported
    - on GLES3.x, base_vertex is only supported since GLES3.2
      (e.g. not supported on WebGL2)
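
    A sketch of such a runtime check, using only the functions and struct
    members named above:

        // sketch: only take the base-vertex/base-instance path when the
        // runtime reports support for it
        const sg_limits lim = sg_query_limits();
        if (lim.draw_base_vertex && lim.draw_base_instance) {
            // safe to call sg_draw_ex() with non-zero base-vertex/-instance
        } else {
            // fall back to rebinding buffers with different offsets
        }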
--- ...or kick off a dispatch-call to invoke a compute shader workload:

        sg_dispatch(int num_groups_x, int num_groups_y, int num_groups_z)

    The dispatch args define the number of 'compute workgroups' processed
    by the currently applied compute shader.

--- finish the current pass with:

        sg_end_pass()

--- when done with the current frame, call:

        sg_commit()

--- at the end of your program, shutdown sokol_gfx with:

        sg_shutdown()
--- if you need to destroy resources before sg_shutdown(), call:

        sg_destroy_buffer(sg_buffer buf)
        sg_destroy_image(sg_image img)
        sg_destroy_sampler(sg_sampler smp)
        sg_destroy_shader(sg_shader shd)
        sg_destroy_pipeline(sg_pipeline pip)
        sg_destroy_view(sg_view view)

--- to set a new viewport rectangle, call:

        sg_apply_viewport(int x, int y, int width, int height, bool origin_top_left)

    ...or if you want to specify the viewport rectangle with float values:

        sg_apply_viewportf(float x, float y, float width, float height, bool origin_top_left)

--- to set a new scissor rect, call:

        sg_apply_scissor_rect(int x, int y, int width, int height, bool origin_top_left)

    ...or with float values:

        sg_apply_scissor_rectf(float x, float y, float width, float height, bool origin_top_left)

    Both sg_apply_viewport() and sg_apply_scissor_rect() must be called
    inside a rendering pass (e.g. not in a compute pass, or outside a pass).
    Note that sg_begin_pass() will reset both the viewport and scissor
    rectangles to cover the entire framebuffer.
--- to update (overwrite) the content of buffer and image resources, call:

        sg_update_buffer(sg_buffer buf, const sg_range* data)
        sg_update_image(sg_image img, const sg_image_data* data)

    Buffers and images to be updated must have been created with
    sg_buffer_desc.usage.dynamic_update or .stream_update.
    Only one update per frame is allowed for buffer and image resources when
    using the sg_update_*() functions. The rationale is to have a simple
    protection from the CPU scribbling over data the GPU is currently
    using, or the CPU having to wait for the GPU.

    Buffer and image updates can be partial, as long as a rendering
    operation only references the valid (updated) data in the
    buffer or image.
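
    A typical per-frame streaming update might look like this (a minimal
    sketch: 'verts' is a hypothetical CPU-side vertex array, and 'buf' is
    assumed to have been created with .usage.stream_update = true):

        // sketch: overwrite the buffer content once per frame
        sg_update_buffer(buf, &SG_RANGE(verts));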
--- to append a chunk of data to a buffer resource, call:

        int sg_append_buffer(sg_buffer buf, const sg_range* data)

    The difference to sg_update_buffer() is that sg_append_buffer()
    can be called multiple times per frame to append new data to the
    buffer piece by piece, optionally interleaved with draw calls referencing
    the previously written data.

    sg_append_buffer() returns a byte offset to the start of the
    written data, this offset can be assigned to
    sg_bindings.vertex_buffer_offsets[n] or
    sg_bindings.index_buffer_offset.

    Code example:

        for (...) {
            const void* data = ...;
            const int num_bytes = ...;
            int offset = sg_append_buffer(buf, &(sg_range) { .ptr=data, .size=num_bytes });
            bindings.vertex_buffer_offsets[0] = offset;
            sg_apply_pipeline(pip);
            sg_apply_bindings(&bindings);
            sg_apply_uniforms(...);
            sg_draw(...);
        }
    A buffer to be used with sg_append_buffer() must have been created
    with sg_buffer_desc.usage.dynamic_update or .stream_update.

    If the application appends more data to the buffer than fits into
    the buffer, the buffer will go into the "overflow" state for the
    rest of the frame.

    Any draw calls attempting to render an overflowed buffer will be
    silently dropped (in debug mode this will also result in a
    validation error).

    You can also check manually if a buffer is in overflow-state by calling:

        bool sg_query_buffer_overflow(sg_buffer buf)

    You can manually check to see if an overflow would occur before adding
    any data to a buffer by calling:

        bool sg_query_buffer_will_overflow(sg_buffer buf, size_t size)
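
    A sketch combining the overflow check with sg_append_buffer() (only
    functions named above are used; 'verts' is a hypothetical data array):

        // sketch: skip appending when the data wouldn't fit anymore
        if (!sg_query_buffer_will_overflow(buf, sizeof(verts))) {
            const int offset = sg_append_buffer(buf, &SG_RANGE(verts));
            bindings.vertex_buffer_offsets[0] = offset;
        } else {
            // buffer is full, any further appends would put it into
            // overflow state for the rest of the frame
        }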
    NOTE: Due to restrictions in underlying 3D-APIs, appended chunks of
    data will be 4-byte aligned in the destination buffer. This means
    that there will be gaps in index buffers containing 16-bit indices
    when the number of indices in a call to sg_append_buffer() is
    odd. This isn't a problem when each call to sg_append_buffer()
    is associated with one draw call, but will be problematic when
    a single indexed draw call spans several appended chunks of indices.

--- to check at runtime for optional features, limits and pixelformat support,
    call:

        sg_features sg_query_features()
        sg_limits sg_query_limits()
        sg_pixelformat_info sg_query_pixelformat(sg_pixel_format fmt)

--- if you need to call into the underlying 3D-API directly, you must call:

        sg_reset_state_cache()

    ...before calling sokol_gfx functions again
--- you can inspect the original sg_desc structure handed to sg_setup()
    by calling sg_query_desc(). This will return an sg_desc struct with
    the default values patched in instead of any zero-initialized values.

--- you can get a desc struct matching the creation attributes of a
    specific resource object via:

        sg_buffer_desc sg_query_buffer_desc(sg_buffer buf)
        sg_image_desc sg_query_image_desc(sg_image img)
        sg_sampler_desc sg_query_sampler_desc(sg_sampler smp)
        sg_shader_desc sg_query_shader_desc(sg_shader shd)
        sg_pipeline_desc sg_query_pipeline_desc(sg_pipeline pip)
        sg_view_desc sg_query_view_desc(sg_view view)

    ...but NOTE that the returned desc structs may be incomplete, only
    creation attributes that are kept around internally after resource
    creation will be filled in, and in some cases (like shaders) that's
    very little. Any missing attributes will be set to zero. The returned
    desc structs might still be useful as partial blueprints for creating
    similar resources if filled up with the missing attributes.

    Calling the query-desc functions on an invalid resource will return
    completely zeroed structs (it makes sense to check the resource state
    with sg_query_*_state() first).
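
    A minimal sketch of that state-check-then-query pattern:

        // sketch: only query the desc when the resource handle is valid
        if (sg_query_buffer_state(buf) == SG_RESOURCESTATE_VALID) {
            const sg_buffer_desc desc = sg_query_buffer_desc(buf);
            // ...use desc as a partial blueprint for a similar buffer...
        }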
--- you can query the default resource creation parameters through the functions:

        sg_buffer_desc sg_query_buffer_defaults(const sg_buffer_desc* desc)
        sg_image_desc sg_query_image_defaults(const sg_image_desc* desc)
        sg_sampler_desc sg_query_sampler_defaults(const sg_sampler_desc* desc)
        sg_shader_desc sg_query_shader_defaults(const sg_shader_desc* desc)
        sg_pipeline_desc sg_query_pipeline_defaults(const sg_pipeline_desc* desc)
        sg_view_desc sg_query_view_defaults(const sg_view_desc* desc)

    These functions take a pointer to a desc structure which may contain
    zero-initialized items for default values. These zero-init values
    will be replaced with their concrete values in the returned desc
    struct.
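
    For instance (a sketch, using only the functions and structs named
    above):

        // sketch: see which defaults would be patched into a minimal
        // image desc (pixel format, sample count, ...)
        const sg_image_desc def = sg_query_image_defaults(&(sg_image_desc){
            .width = 256,
            .height = 256,
        });
        // def.pixel_format, def.sample_count etc. now contain concrete values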
--- you can inspect various internal resource runtime values via:

        sg_buffer_info sg_query_buffer_info(sg_buffer buf)
        sg_image_info sg_query_image_info(sg_image img)
        sg_sampler_info sg_query_sampler_info(sg_sampler smp)
        sg_shader_info sg_query_shader_info(sg_shader shd)
        sg_pipeline_info sg_query_pipeline_info(sg_pipeline pip)
        sg_view_info sg_query_view_info(sg_view view)

    ...please note that the returned info-structs are tied quite closely
    to sokol_gfx.h internals, and may change more often than other
    public API functions and structs.

--- you can query the type/flavour and parent resource of a view:

        sg_view_type sg_query_view_type(sg_view view)
        sg_image sg_query_view_image(sg_view view)
        sg_buffer sg_query_view_buffer(sg_view view)

--- you can query stats and control stats collection via:

        sg_query_stats()
        sg_enable_stats()
        sg_disable_stats()
        sg_stats_enabled()

--- you can ask at runtime what backend sokol_gfx.h has been compiled for:

        sg_backend sg_query_backend(void)
--- call the following helper functions to compute the number of
    bytes in a texture row or surface for a specific pixel format.
    These functions might be helpful when preparing image data for consumption
    by sg_make_image() or sg_update_image():

        int sg_query_row_pitch(sg_pixel_format fmt, int width, int row_align_bytes);
        int sg_query_surface_pitch(sg_pixel_format fmt, int width, int height, int row_align_bytes);

    Width and height are generally in number of pixels, but note that 'row' has a
    different meaning for uncompressed vs compressed pixel formats: for uncompressed
    formats, a row is identical with a single line of pixels, while in compressed
    formats, one row is a line of *compression blocks*.
    This is why calling sg_query_surface_pitch() for a compressed pixel format and height
    N, N+1, N+2, ... may return the same result.

    The row_align_bytes parameter is for added flexibility. For image data that goes into
    sg_make_image() or sg_update_image() this should generally be 1, because these
    functions take tightly packed image data as input no matter what alignment restrictions
    exist in the backend 3D APIs.
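
    For example (a sketch using only the two functions above; the comments
    assume a tightly packed, uncompressed RGBA8 image):

        // sketch: compute the byte size of a tightly packed 256x256 RGBA8 image
        const int w = 256;
        const int h = 256;
        const int row_pitch = sg_query_row_pitch(SG_PIXELFORMAT_RGBA8, w, 1);        // 256 * 4 bytes
        const int num_bytes = sg_query_surface_pitch(SG_PIXELFORMAT_RGBA8, w, h, 1); // row_pitch * 256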
ON INITIALIZATION:
==================
When calling sg_setup(), a pointer to an sg_desc struct must be provided
which contains initialization options. These options provide two types
of information to sokol-gfx:

(1) upper bounds and limits needed to allocate various internal
    data structures:
        - the max number of resources of each type that can
          be alive at the same time, this is used for allocating
          internal pools
        - the max overall size of uniform data that can be
          updated per frame, including a worst-case alignment
          per uniform update (this worst-case alignment is 256 bytes)
        - the max size of all dynamic resource updates (sg_update_buffer,
          sg_append_buffer and sg_update_image) per frame
        - the max number of compute-dispatch calls in a compute pass
    Not all of those limit values are used by all backends, but it is
    good practice to provide them nonetheless.

(2) 3D backend "environment information" in a nested sg_environment struct:
        - pointers to backend-specific context- or device-objects (for instance
          the D3D11, WebGPU or Metal device objects)
        - defaults for external swapchain pixel formats and sample counts,
          these will be used as default values in image and pipeline objects,
          and the sg_swapchain struct passed into sg_begin_pass()
    Usually you provide a complete sg_environment struct through
    a helper function, as an example look at the sglue_environment()
    function in the sokol_glue.h header.

See the documentation block of the sg_desc struct below for more information.
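
As a rough sketch, an sg_setup() call which overrides some of those limits
could look like this (the .environment and .logger fields are described
above; the specific pool-size and uniform-size member names are assumptions,
check the sg_desc documentation block below for the authoritative names):

    // sketch: bump a few pool sizes at setup time (member names are
    // assumptions, see the sg_desc documentation below)
    sg_setup(&(sg_desc){
        .buffer_pool_size = 256,                 // max number of alive buffers
        .image_pool_size = 256,                  // max number of alive images
        .uniform_buffer_size = 4 * 1024 * 1024,  // per-frame uniform data budget
        .environment = sglue_environment(),
        .logger.func = slog_func,
    });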
ON RENDER PASSES
================
Relevant samples:

    - https://floooh.github.io/sokol-html5/offscreen-sapp.html
    - https://floooh.github.io/sokol-html5/offscreen-msaa-sapp.html
    - https://floooh.github.io/sokol-html5/mrt-sapp.html
    - https://floooh.github.io/sokol-html5/mrt-pixelformats-sapp.html

A render pass groups rendering commands into a set of render target images
(called 'render pass attachments'). Render target images can be used in subsequent
passes as textures (it is invalid to use the same image both as render target
and as texture in the same pass).

The following sokol-gfx functions must only be called inside a render-pass:

    sg_apply_viewport[f]
    sg_apply_scissor_rect[f]
    sg_draw

The following functions may be called inside a render- or compute-pass, but
not outside a pass:

    sg_apply_pipeline
    sg_apply_bindings
    sg_apply_uniforms

A frame must have at least one 'swapchain render pass' which renders into an
externally managed swapchain, passed as an sg_swapchain struct to the
sg_begin_pass() function. If you use sokol_gfx.h together with sokol_app.h,
just call the sglue_swapchain() helper function in sokol_glue.h to
provide the swapchain information. Otherwise the following information
must be provided:

    - the color pixel-format of the swapchain's render surface
    - an optional depth/stencil pixel format if the swapchain
      has a depth/stencil buffer
    - an optional sample-count for MSAA rendering
    - NOTE: the above three values can be zero-initialized, in that
      case the defaults from the sg_environment struct will be used that
      had been passed to the sg_setup() function.
    - a number of backend specific objects:
        - GL/GLES3: just a GL framebuffer handle
        - D3D11:
            - an ID3D11RenderTargetView for the rendering surface
            - if MSAA is used, an ID3D11RenderTargetView as
              MSAA resolve-target
            - an optional ID3D11DepthStencilView for the
              depth/stencil buffer
        - WebGPU:
            - a WGPUTextureView object for the rendering surface
            - if MSAA is used, a WGPUTextureView object as MSAA resolve target
            - an optional WGPUTextureView for the depth/stencil buffer
        - Metal (NOTE that the roles of provided surfaces are slightly
          different in Metal than in D3D11 or WebGPU, notably, the
          CAMetalDrawable is either rendered to directly, or serves
          as MSAA resolve target):
            - a CAMetalDrawable object which is either rendered
              into directly, or in case of MSAA rendering, serves
              as MSAA-resolve-target
            - if MSAA is used, a multisampled MTLTexture where
              rendering goes into
            - an optional MTLTexture for the depth/stencil buffer

It's recommended that you create a helper function which returns an
initialized sg_swapchain struct by value. This can then be directly plugged
into the sg_begin_pass function like this:

    sg_begin_pass(&(sg_pass){ .swapchain = sglue_swapchain() });

As an example for such a helper function check out the function sglue_swapchain()
in the sokol_glue.h header.
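
A sketch of such a helper for a GL application rendering into the default
framebuffer (the sg_swapchain members follow the information listed above;
window_width()/window_height() are hypothetical stand-ins for your own
window system glue):

    // sketch: a custom swapchain helper for a GL app
    static sg_swapchain my_swapchain(void) {
        return (sg_swapchain){
            .width = window_width(),    // hypothetical window glue
            .height = window_height(),  // hypothetical window glue
            .sample_count = 1,
            .color_format = SG_PIXELFORMAT_RGBA8,
            .depth_format = SG_PIXELFORMAT_DEPTH_STENCIL,
            .gl.framebuffer = 0,        // 0 is the default framebuffer
        };
    }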
For offscreen render passes, the render target images used in a render pass
must be provided as sg_view objects specialized for the specific pass-attachment
types:

    - color-attachment-views for color-rendering
    - depth-stencil-attachment-views for the depth-stencil-buffer surface
    - resolve-attachment-views for MSAA-resolve operations

For a simple offscreen scenario with one color-, one depth-stencil-render
target and without multisampling, setting up the required image-
and view-objects looks like this:

First create two render target images, one with a color pixel format,
and one with the depth- or depth-stencil pixel format. Both images
must have the same dimensions. Also note the usage flags:

    const sg_image color_img = sg_make_image(&(sg_image_desc){
        .usage.color_attachment = true,
        .width = 256,
        .height = 256,
        .pixel_format = SG_PIXELFORMAT_RGBA8,
        .sample_count = 1,
    });
    const sg_image depth_img = sg_make_image(&(sg_image_desc){
        .usage.depth_stencil_attachment = true,
        .width = 256,
        .height = 256,
        .pixel_format = SG_PIXELFORMAT_DEPTH,
        .sample_count = 1,
    });

NOTE: when creating render target images, keep in mind that some default values
are aligned with the default environment attributes in the sg_environment struct
that was passed into the sg_setup() call:

    - the default value for sg_image_desc.pixel_format is taken from
      sg_environment.defaults.color_format
    - the default value for sg_image_desc.sample_count is taken from
      sg_environment.defaults.sample_count
    - the default value for sg_image_desc.num_mipmaps is always 1

Next, create two view objects, one color-attachment-view and one
depth-stencil-attachment-view:

    const sg_view color_att_view = sg_make_view(&(sg_view_desc){
        .color_attachment.image = color_img,
    });
    const sg_view depth_att_view = sg_make_view(&(sg_view_desc){
        .depth_stencil_attachment.image = depth_img,
    });

You'll typically also want to create a texture-view on the color image
to sample the color attachment image as texture in a later pass:

    const sg_view tex_view = sg_make_view(&(sg_view_desc){
        .texture.image = color_img,
    });

The attachment-view objects are then passed into the sg_begin_pass function in
place of the nested swapchain struct:

    sg_begin_pass(&(sg_pass){
        .attachments = {
            .colors[0] = color_att_view,
            .depth_stencil = depth_att_view,
        },
    });
...in a later pass when you want to sample the color attachment image as
texture, use the texture view in the sg_apply_bindings() call:

    sg_apply_bindings(&(sg_bindings){
        .vertex_buffers[0] = ...,
        .index_buffer = ...,
        .views[VIEW_tex] = tex_view,
        .samplers[SMP_smp] = smp,
    });

Swapchain and offscreen passes form dependency trees with a swapchain
pass at the root, offscreen passes as nodes, and attachment images as
dependencies between passes.

sg_pass_action structs are used to define actions that should happen at the
start and end of render passes (such as clearing pass attachments to a
specific color or depth-value, or performing an MSAA resolve operation at
the end of a pass).

A typical sg_pass_action object which clears the color attachment to black
might look like this:

    const sg_pass_action pass_action = {
        .colors[0] = {
            .load_action = SG_LOADACTION_CLEAR,
            .clear_value = { 0.0f, 0.0f, 0.0f, 1.0f }
        }
    };
This omits the defaults for the color attachment store action, and
the depth-stencil-attachment actions. The same pass action with the
defaults explicitly filled in would look like this:

    const sg_pass_action pass_action = {
        .colors[0] = {
            .load_action = SG_LOADACTION_CLEAR,
            .store_action = SG_STOREACTION_STORE,
            .clear_value = { 0.0f, 0.0f, 0.0f, 1.0f }
        },
        .depth = {
            .load_action = SG_LOADACTION_CLEAR,
            .store_action = SG_STOREACTION_DONTCARE,
            .clear_value = 1.0f,
        },
        .stencil = {
            .load_action = SG_LOADACTION_CLEAR,
            .store_action = SG_STOREACTION_DONTCARE,
            .clear_value = 0
        }
    };

With the sg_pass object and sg_pass_action struct in place, everything
is ready for the actual render pass. Using this prepared sg_pass_action
in a swapchain pass looks like this:
    sg_begin_pass(&(sg_pass){
        .action = pass_action,
        .swapchain = sglue_swapchain()
    });
    ...
    sg_end_pass();

...or alternatively in an offscreen pass:

    sg_begin_pass(&(sg_pass){
        .action = pass_action,
        .attachments = {
            .colors[0] = color_att_view,
            .depth_stencil = ds_att_view,
        },
    });
    ...
    sg_end_pass();

Offscreen rendering can also go into a mipmap, or a slice/face of
a cube-, array- or 3d-image (with some restrictions, for instance
it's not possible to create a 3D image with a depth/stencil pixel format,
these exceptions are generally caught by the sokol-gfx validation layer).
The mipmap/slice selection is baked into the attachment-view objects, for
instance to create a color-attachment-view for rendering into mip-level
2 and slice 3 of an array texture:

    const sg_view color_att_view = sg_make_view(&(sg_view_desc){
        .color_attachment = {
            .image = color_img,
            .mip_level = 2,
            .slice = 3,
        },
    });
If MSAA offscreen rendering is desired, the multi-sample rendering result
must be 'resolved' into a separate 'resolve image', before that image can
be used as texture.

Setting up MSAA offscreen rendering requires three image objects
(one color-attachment image with a sample count > 1, a resolve-attachment
image with a sample count of 1, and a depth-stencil-attachment image
with the same sample count as the color-attachment image):

    const sg_image color_img = sg_make_image(&(sg_image_desc){
        .usage.color_attachment = true,
        .width = 256,
        .height = 256,
        .pixel_format = SG_PIXELFORMAT_RGBA8,
        .sample_count = 4,
    });
    const sg_image resolve_img = sg_make_image(&(sg_image_desc){
        .usage.resolve_attachment = true,
        .width = 256,
        .height = 256,
        .pixel_format = SG_PIXELFORMAT_RGBA8,
        .sample_count = 1,
    });
    const sg_image depth_img = sg_make_image(&(sg_image_desc){
        .usage.depth_stencil_attachment = true,
        .width = 256,
        .height = 256,
        .pixel_format = SG_PIXELFORMAT_DEPTH,
        .sample_count = 4,
    });

Next you'll need the corresponding attachment-view objects:

    const sg_view color_att_view = sg_make_view(&(sg_view_desc){
        .color_attachment.image = color_img,
    });
    const sg_view resolve_att_view = sg_make_view(&(sg_view_desc){
        .resolve_attachment.image = resolve_img,
    });
    const sg_view depth_att_view = sg_make_view(&(sg_view_desc){
        .depth_stencil_attachment.image = depth_img,
    });

To sample the rendered image as a texture in a later pass you'll also
need a texture-view on the resolve-attachment-image (not the color-attachment-image!):

    const sg_view tex_view = sg_make_view(&(sg_view_desc){
        .texture.image = resolve_img,
    });

Next start the render pass with all attachment-views. As soon as a
resolve-attachment-view is provided, an MSAA resolve operation will happen
at the end of the pass. Also note that the content of the MSAA color-attachment-image
doesn't need to be preserved, since it's only needed until the MSAA-resolve
at the end of the pass, so the .store_action should be set to "don't care":

    sg_begin_pass(&(sg_pass){
        .attachments = {
            .colors[0] = color_att_view,
            .resolves[0] = resolve_att_view,
            .depth_stencil = depth_att_view,
        },
        .action = {
            .colors[0] = {
                .load_action = SG_LOADACTION_CLEAR,
                .store_action = SG_STOREACTION_DONTCARE,
                .clear_value = { 0.0f, 0.0f, 0.0f, 1.0f },
            }
        },
    });

...in a later pass, use the texture-view that had been created on the
resolve-image to use the rendering result as texture:

    sg_apply_bindings(&(sg_bindings){
        .vertex_buffers[0] = ...,
        .index_buffer = ...,
        .views[VIEW_tex] = tex_view,
        .samplers[SMP_smp] = smp,
    });
ON COMPUTE PASSES
=================
Compute passes are used to update the content of storage buffers and
storage images by running compute shader code on the GPU. Updating storage
resources with a compute shader will almost always be more efficient than
computing the same data on the CPU and then uploading it via
`sg_update_buffer()` or `sg_update_image()`.

NOTE: compute passes are only supported on the following platforms and
backends:

    - macOS and iOS with Metal
    - Windows with D3D11 and OpenGL
    - Linux with OpenGL or GLES3.1+
    - Web with WebGPU
    - Android with GLES3.1+

...this means compute shaders can't be used on the following platform/backend
combos (the same restrictions apply to using storage buffers without compute
shaders):

    - macOS with GL
    - iOS with GLES3
    - Web with WebGL2

A compute pass is started with:

    sg_begin_pass(&(sg_pass){ .compute = true });

...and finished with a regular:

    sg_end_pass();

Typically the following functions will be called inside a compute pass:

    sg_apply_pipeline()
    sg_apply_bindings()
    sg_apply_uniforms()
    sg_dispatch()

The following functions are disallowed inside a compute pass
and will cause validation layer errors:

    sg_apply_viewport[f]()
    sg_apply_scissor_rect[f]()
    sg_draw()

Only special 'compute shaders' and 'compute pipelines' can be used in
compute passes. A compute shader only has a compute-function instead
of a vertex- and fragment-function pair, and it doesn't accept vertex-
and index-buffers as bindings, only storage-buffer-views (readable
and writable), storage-image-views (read/write or writeonly) and
texture-views (read-only).

A compute pipeline is created by providing a compute shader object,
setting the .compute creation parameter to true and not defining any
'render state':

    sg_pipeline pip = sg_make_pipeline(&(sg_pipeline_desc){
        .compute = true,
        .shader = compute_shader,
    });

The sg_apply_bindings and sg_apply_uniforms calls are the same as in
render passes, with the exception that no vertex- and index-buffers
can be bound in the sg_apply_bindings call.

Finally, to kick off a compute workload, call sg_dispatch with the
number of workgroups in the x, y and z-dimension:

    sg_dispatch(int num_groups_x, int num_groups_y, int num_groups_z)
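
Put together, a minimal compute pass might look like this (a sketch:
'compute_pip' and 'storage_buf_view' are assumed to have been created
beforehand, and the compute shader is assumed to use a workgroup size
of 64 in the x-dimension):

    // sketch: process num_items buffer elements with workgroup size 64
    sg_begin_pass(&(sg_pass){ .compute = true });
    sg_apply_pipeline(compute_pip);
    sg_apply_bindings(&(sg_bindings){
        .views[0] = storage_buf_view,   // a writable storage-buffer-view
    });
    sg_dispatch((num_items + 63) / 64, 1, 1);
    sg_end_pass();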
Also see the following compute-shader samples:

    - https://floooh.github.io/sokol-webgpu/instancing-compute-sapp.html
    - https://floooh.github.io/sokol-webgpu/computeboids-sapp.html
    - https://floooh.github.io/sokol-webgpu/imageblur-sapp.html

ON SHADER CREATION
==================
sokol-gfx doesn't come with an integrated shader cross-compiler, instead
backend-specific shader sources or binary blobs need to be provided when
creating a shader object, along with reflection information about the
shader resource binding interface needed to bind sokol-gfx resources to the
proper shader inputs.

The easiest way to provide all this shader creation data is to use the
sokol-shdc shader compiler tool to compile shaders from a common
GLSL syntax into backend-specific sources or binary blobs, along with
shader interface information and uniform blocks and storage buffer array items
mapped to C structs.

To create a shader using a C header which has been code-generated by sokol-shdc:

    // include the C header code-generated by sokol-shdc:
    #include "myshader.glsl.h"
    ...
    // create shader using a code-generated helper function from the C header:
    sg_shader shd = sg_make_shader(myshader_shader_desc(sg_query_backend()));

The samples in the 'sapp' subdirectory of the sokol-samples project
also use the sokol-shdc approach:

    https://github.com/floooh/sokol-samples/tree/master/sapp

If you're planning to use sokol-shdc, you can stop reading here, and instead
continue with the sokol-shdc documentation:

    https://github.com/floooh/sokol-tools/blob/master/docs/sokol-shdc.md

To create shaders with backend-specific shader code or binary blobs,
the sg_make_shader() function requires the following information:

- Shader code or shader binary blobs for the vertex- and fragment-, or the
  compute-shader-stage:
    - for the desktop GL backend, source code can be provided in '#version 410' or
      '#version 430' syntax, version 430 is required when using storage buffers and
      compute shaders, but note that this is not available on macOS
    - for the GLES3 backend, source code must be provided in '#version 300 es' or
      '#version 310 es' syntax (version 310 is required for storage buffer and
      compute shader support, but note that this is not supported on WebGL2)
    - for the D3D11 backend, shaders can be provided as source or binary
      blobs, the source code should be in HLSL4.0 (for compatibility with old
      low-end GPUs) or preferably in HLSL5.0 syntax, note that when
      shader source code is provided for the D3D11 backend, sokol-gfx will
      dynamically load 'd3dcompiler_47.dll'
    - for the Metal backends, shaders can be provided as source or binary blobs, the
      MSL version should be 'metal-1.1' (other versions may work but are not tested)
    - for the WebGPU backend, shaders must be provided as WGSL source code
    - optionally the following shader-code related attributes can be provided:
        - an entry function name (only on D3D11 or Metal, but not OpenGL)
        - on D3D11 only, a compilation target (default is "vs_4_0" and "ps_4_0")

- Information about the input vertex attributes used by the vertex shader,
  most of that backend-specific:
    - An optional 'base type' (float, signed-/unsigned-int) for each vertex
      attribute. When provided, this is used by the validation layer to check
      that the CPU-side input vertex format is compatible with the input
      vertex declaration of the vertex shader.
    - Metal: no location information needed since vertex attributes are always bound
      by their attribute location defined in the shader via '[[attribute(N)]]'
    - WebGPU: no location information needed since vertex attributes are always
      bound by their attribute location defined in the shader via `@location(N)`
    - GLSL: vertex attribute names can be optionally provided, in that case their
      location will be looked up by name, otherwise, the vertex attribute location
      can be defined with 'layout(location = N)'
    - D3D11: a 'semantic name' and 'semantic index' must be provided for each vertex
      attribute, e.g. if the vertex attribute is defined as 'TEXCOORD1' in the shader,
      the semantic name would be 'TEXCOORD', and the semantic index would be '1'

  NOTE that vertex attributes currently must not have gaps. This requirement
  may be relaxed in the future.

- Specifically for Metal compute shaders, the 'number of threads per threadgroup'
  must be provided. Normally this is extracted by sokol-shdc from the GLSL
  shader source code. For instance the following statement in the input GLSL:

      layout(local_size_x=64, local_size_y=1, local_size_z=1) in;

  ...will be communicated to the sokol-gfx Metal backend in the
  code-generated sg_shader_desc struct:

      (sg_shader_desc){
          .mtl_threads_per_threadgroup = { .x = 64, .y = 1, .z = 1 },
      }
- Information about each uniform block binding used in the shader:
    - the shader stage of the uniform block (vertex, fragment or compute)
    - the size of the uniform block in number of bytes
    - a memory layout hint (currently 'native' or 'std140') where 'native' defines a
      backend-specific memory layout which shouldn't be used for cross-platform code.
      Only std140 guarantees a backend-agnostic memory layout.
    - a backend-specific bind slot:
        - D3D11/HLSL: the buffer register N (`register(bN)`) where N is 0..7
        - Metal/MSL: the buffer bind slot N (`[[buffer(N)]]`) where N is 0..7
        - WebGPU: the binding N in `@group(0) @binding(N)` where N is 0..15
    - For GLSL only: a description of the internal uniform block layout, which maps
      member types and their offsets on the CPU side to uniform variable names
      in the GLSL shader
    - please also NOTE the documentation sections about UNIFORM DATA LAYOUT
      and CROSS-BACKEND COMMON UNIFORM DATA LAYOUT below!

- A description of each resource binding (texture-, storage-buffer-
  and storage-image-bindings) which directly map to the sg_bindings.views[]
  array slots. Each resource binding slot comes in three flavours:

    1. Texture bindings with the following properties:
        - the shader stage of the texture (vertex, fragment or compute)
        - the expected image type:
            - SG_IMAGETYPE_2D
            - SG_IMAGETYPE_CUBE
            - SG_IMAGETYPE_3D
            - SG_IMAGETYPE_ARRAY
        - the expected 'image sample type':
            - SG_IMAGESAMPLETYPE_FLOAT
            - SG_IMAGESAMPLETYPE_DEPTH
            - SG_IMAGESAMPLETYPE_SINT
            - SG_IMAGESAMPLETYPE_UINT
            - SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT
        - a flag whether the texture is expected to be multisampled
        - a backend-specific bind slot:
            - D3D11/HLSL: the texture register N (`register(tN)`) where N is 0..31
              (in HLSL, readonly storage buffers and textures share the same bind space)
            - Metal/MSL: the texture bind slot N (`[[texture(N)]]`) where N is 0..31
              (the bind slot must not collide with storage image bindings on the same stage)
            - WebGPU/WGSL: the binding N in `@group(1) @binding(N)` where N is 0..127

    2. Storage buffer bindings with the following properties:
        - the shader stage of the storage buffer
        - a boolean 'readonly' flag, this is used for validation and hazard
          tracking in some 3D backends. Note that in render passes, only
          readonly storage buffer bindings are allowed. In compute passes, any
          read/write storage buffer binding is assumed to be written to by the
          compute shader.
        - a backend-specific bind slot:
            - D3D11/HLSL:
                - for readonly storage buffer bindings: the texture register N
                  (`register(tN)`) where N is 0..31 (in HLSL, readonly storage
                  buffers and textures share the same bind space for
                  'shader resource views')
                - for read/write storage buffer bindings: the UAV register N
                  (`register(uN)`) where N is 0..31 (in HLSL, readwrite storage
                  buffers use their own bind space for 'unordered access views')
            - Metal/MSL: the buffer bind slot N (`[[buffer(N)]]`) where N is 8..23
            - WebGPU/WGSL: the binding N in `@group(1) @binding(N)` where N is 0..127
            - GL/GLSL: the buffer binding N in `layout(binding=N)`
              where N is 0..sg_limits.max_storage_buffer_bindings_per_stage
        - note that storage buffer bindings are not supported on all backends
          and platforms
  809. - note that storage buffer bindings are not supported on all backends
  810. and platforms
  811. 3. Storage image bindings with the following properties:
  812. - the shader stage (*must* be compute)
  813. - the expected image type:
  814. - SG_IMAGETYPE_2D
  815. - SG_IMAGETYPE_CUBE
  816. - SG_IMAGETYPE_3D
  817. - SG_IMAGETYPE_ARRAY
  818. - the 'access pixel format', this is currently limited to:
  819. - SG_PIXELFORMAT_RGBA8
  820. - SG_PIXELFORMAT_RGBA8SN/UI/SI
  821. - SG_PIXELFORMAT_RGBA16UI/SI/F
  822. - SG_PIXELFORMAT_R32UIUI/SI/F
  823. - SG_PIXELFORMAT_RG32UI/SI/F
  824. - SG_PIXELFORMAT_RGBA32UI/SI/F
  825. - the access type (readwrite or writeonly)
  826. - a backend-specific bind slot:
  827. - D3D11/HLSL: the UAV register N (`register(uN)` where N is 0..31, the
  828. bind slot must not collide with UAV storage buffer bindings
  829. - Metal/MSL: the texture bind slot N (`[[texture(N)]])` where N is 0..31,
  830. the bind slot must not collide with other texture bindings on the same
  831. stage
  832. - WebGPU/WGSL: the binding N in `@group(1) @binding(N)` where N is 0..127
  833. - GL/GLSL: the buffer binding N in `layout(binding=N)`
  834. where N is 0.._sg.max_storage_image_bindings_per_stage
  835. - note that storage image bindings are not supported on all backends and platforms
  836. - A description of each sampler used in the shader:
  837. - the shader stage of the sampler (vertex, fragment or compute)
  838. - the expected sampler type:
  839. - SG_SAMPLERTYPE_FILTERING,
  840. - SG_SAMPLERTYPE_NONFILTERING,
  841. - SG_SAMPLERTYPE_COMPARISON,
  842. - a backend-specific bind slot:
  843. - D3D11/HLSL: the sampler register N (`register(sN)`) where N is 0..SG_MAX_SAMPLER_BINDINGS
  844. - Metal/MSL: the sampler bind slot N (`[[sampler(N)]]`) where N is 0..SG_MAX_SAMPLER_BINDINGS
  845. - WebGPU/WGSL: the binding N in `@group(0) @binding(N)` where N is 0..127
  846. - An array of 'texture-sampler-pairs' used by the shader to sample textures,
  847. for D3D11, Metal and WebGPU this is used for validation purposes to check
  848. whether the texture and sampler are compatible with each other (especially
  849. WebGPU is very picky about combining the correct
  850. texture-sample-type with the correct sampler-type). For GLSL an
  851. additional 'combined-image-sampler name' must be provided because 'OpenGL
  852. style GLSL' cannot handle separate texture and sampler objects, but still
  853. groups them into a traditional GLSL 'sampler object'.
  854. Compatibility rules for image-sample-type vs sampler-type are as follows:
  855. - SG_IMAGESAMPLETYPE_FLOAT => (SG_SAMPLERTYPE_FILTERING or SG_SAMPLERTYPE_NONFILTERING)
  856. - SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT => SG_SAMPLERTYPE_NONFILTERING
  857. - SG_IMAGESAMPLETYPE_SINT => SG_SAMPLERTYPE_NONFILTERING
  858. - SG_IMAGESAMPLETYPE_UINT => SG_SAMPLERTYPE_NONFILTERING
  859. - SG_IMAGESAMPLETYPE_DEPTH => SG_SAMPLERTYPE_COMPARISON
Backend-specific bindslot ranges (not relevant when using sokol-shdc):

    - D3D11/HLSL:
        - separate bindslot space per shader stage
        - uniform block bindings (as cbuffer): `register(b0..b7)`
        - texture- and readonly storage buffer bindings: `register(t0..t31)`
        - read/write storage buffer and storage image bindings: `register(u0..u31)`
        - samplers: `register(s0..s11)`
    - Metal/MSL:
        - separate bindslot space per shader stage
        - uniform blocks: `[[buffer(0..7)]]`
        - storage buffers: `[[buffer(8..23)]]`
        - textures and storage image bindings: `[[texture(0..31)]]`
        - samplers: `[[sampler(0..11)]]`
    - WebGPU/WGSL:
        - common bindslot space across shader stages
        - uniform blocks: `@group(0) @binding(0..15)`
        - textures, storage-images, storage-buffers and samplers: `@group(1) @binding(0..127)`
    - GL/GLSL:
        - uniforms and image-samplers are bound by name
        - storage buffer bindings: `layout(std430, binding=0..sg_limits.max_storage_buffer_bindings_per_stage)`
          (common bindslot space across shader stages)
        - storage image bindings: `layout(binding=0..sg_limits.max_storage_image_bindings_per_stage, [access_format])`

For example code of how to create backend-specific shader objects,
please refer to the following samples:

    - for D3D11: https://github.com/floooh/sokol-samples/tree/master/d3d11
    - for Metal: https://github.com/floooh/sokol-samples/tree/master/metal
    - for OpenGL: https://github.com/floooh/sokol-samples/tree/master/glfw
    - for GLES3: https://github.com/floooh/sokol-samples/tree/master/html5
    - for WebGPU: https://github.com/floooh/sokol-samples/tree/master/wgpu
ON SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT AND SG_SAMPLERTYPE_NONFILTERING
========================================================================
The WebGPU backend introduces the concept of 'unfilterable-float' textures,
which can only be combined with 'nonfiltering' samplers (this is a restriction
specific to WebGPU, but since the same sokol-gfx code should work across
all backends, the sokol-gfx validation layer also enforces this restriction
- the alternative would be undefined behaviour in some backend APIs on
some devices).

The background is that some mobile devices (most notably iOS devices) can
not perform linear filtering when sampling textures with certain pixel
formats, most notably the 32F formats:

    - SG_PIXELFORMAT_R32F
    - SG_PIXELFORMAT_RG32F
    - SG_PIXELFORMAT_RGBA32F

The information of whether a shader is going to be used with such an
unfilterable-float texture must already be provided in the sg_shader_desc
struct when creating the shader (see the above section "ON SHADER CREATION").

If you are using the sokol-shdc shader compiler, the information whether a
texture/sampler binding expects an 'unfilterable-float/nonfiltering'
texture/sampler combination cannot be inferred from the shader source
alone, you'll need to provide this hint via annotation-tags. For instance
here is an example from the ozz-skin-sapp.c sample shader which samples an
RGBA32F texture with skinning matrices in the vertex shader:

    ```glsl
    @image_sample_type joint_tex unfilterable_float
    uniform texture2D joint_tex;
    @sampler_type smp nonfiltering
    uniform sampler smp;
    ```

This will result in SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT and
SG_SAMPLERTYPE_NONFILTERING being written to the code-generated
sg_shader_desc struct.
ON VERTEX FORMATS
=================
Sokol-gfx enforces strict mapping rules from CPU-side vertex component
formats to GPU-side vertex input base types:

- float and packed normalized CPU-side formats must be used as
  floating point base type in the vertex shader
- packed signed-integer CPU-side formats must be used as signed
  integer base type in the vertex shader
- packed unsigned-integer CPU-side formats must be used as unsigned
  integer base type in the vertex shader

These mapping rules are enforced by the sokol-gfx validation layer,
but only when sufficient reflection information is provided in
`sg_shader_desc.attrs[].base_type`. This is the case when sokol-shdc
is used, otherwise the default base_type will be SG_SHADERATTRBASETYPE_UNDEFINED
which causes the sokol-gfx validation check to be skipped (of course you
can also provide the per-attribute base type information manually when
not using sokol-shdc).

The detailed mapping rules from SG_VERTEXFORMAT_* to GLSL data types
are as follows:

- FLOAT[*]   => float, vec*
- BYTE4N     => vec* (scaled to -1.0 .. +1.0)
- UBYTE4N    => vec* (scaled to 0.0 .. +1.0)
- SHORT[*]N  => vec* (scaled to -1.0 .. +1.0)
- USHORT[*]N => vec* (scaled to 0.0 .. +1.0)
- INT[*]     => int, ivec*
- UINT[*]    => uint, uvec*
- BYTE4      => ivec4
- UBYTE4     => uvec4
- SHORT[*]   => ivec*
- USHORT[*]  => uvec*
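As an illustration of these rules, here's a sketch of a packed CPU-side
vertex and a matching pipeline vertex layout (the attribute indices and
struct layout are assumptions for this example):

    typedef struct {
        float x, y, z;      // SG_VERTEXFORMAT_FLOAT3 => vec3 in the shader
        uint8_t color[4];   // SG_VERTEXFORMAT_UBYTE4N => vec4 (0.0 .. 1.0)
    } vertex_t;

    sg_pipeline pip = sg_make_pipeline(&(sg_pipeline_desc){
        .layout = {
            .attrs = {
                [0].format = SG_VERTEXFORMAT_FLOAT3,
                [1].format = SG_VERTEXFORMAT_UBYTE4N,
            },
        },
        // ...shader and other pipeline state...
    });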
NOTE that sokol-gfx only provides vertex formats with sizes of a multiple
of 4 (e.g. BYTE4N but not BYTE2N). This is because vertex components must
be 4-byte aligned anyway.
UNIFORM DATA LAYOUT:
====================
NOTE: if you use the sokol-shdc shader compiler tool, you don't need to worry
about the following details.

The data that's passed into the sg_apply_uniforms() function must adhere to
specific layout rules so that the GPU shader finds the uniform block
items at the right offset.

For the D3D11 and Metal backends, sokol-gfx only cares about the size of uniform
blocks, but not about the internal layout. The data will just be copied into
a uniform/constant buffer in a single operation and it's up to you to arrange the
CPU-side layout so that it matches the GPU-side layout. This also means that with
the D3D11 and Metal backends you are not limited to a 'cross-platform' subset
of uniform variable types.

If you only ever use the D3D11, Metal or WebGPU backends, you can stop reading here.

For the GL backends, the internal layout of uniform blocks matters though,
and you are limited to a small number of uniform variable types. This is
because sokol-gfx must be able to locate the uniform block members in order
to upload them to the GPU with glUniformXXX() calls.

To describe the uniform block layout to sokol-gfx, the following information
must be passed to the sg_make_shader() call in the sg_shader_desc struct:

- a hint about the used packing rule (either SG_UNIFORMLAYOUT_NATIVE or
  SG_UNIFORMLAYOUT_STD140)
- a list of the uniform block member types in the correct order they
  appear on the CPU side

For example if the GLSL shader has the following uniform declarations:

    uniform mat4 mvp;
    uniform vec2 offset0;
    uniform vec2 offset1;
    uniform vec2 offset2;

...and on the CPU side, there's a similar C struct:

    typedef struct {
        float mvp[16];
        float offset0[2];
        float offset1[2];
        float offset2[2];
    } params_t;

...the uniform block description in the sg_shader_desc must look like this:

    sg_shader_desc desc = {
        .vs.uniform_blocks[0] = {
            .size = sizeof(params_t),
            .layout = SG_UNIFORMLAYOUT_NATIVE, // this is the default and can be omitted
            .uniforms = {
                // order must be the same as in 'params_t':
                [0] = { .name = "mvp", .type = SG_UNIFORMTYPE_MAT4 },
                [1] = { .name = "offset0", .type = SG_UNIFORMTYPE_FLOAT2 },
                [2] = { .name = "offset1", .type = SG_UNIFORMTYPE_FLOAT2 },
                [3] = { .name = "offset2", .type = SG_UNIFORMTYPE_FLOAT2 },
            }
        }
    };

With this information sokol-gfx can now compute the correct offsets of the data items
within the uniform block struct.

The SG_UNIFORMLAYOUT_NATIVE packing rule works fine if only the GL backends are used,
but for code that must also run on the D3D11, Metal or WebGPU backends, a subset of
the std140 layout must be used, which is described in the next section:
CROSS-BACKEND COMMON UNIFORM DATA LAYOUT
========================================
For cross-platform / cross-3D-backend code it is important that the same uniform block
layout on the CPU side can be used for all sokol-gfx backends. To achieve this,
a common subset of the std140 layout must be used:

- The uniform block layout hint in sg_shader_desc must be explicitly set to
  SG_UNIFORMLAYOUT_STD140.
- Only the following GLSL uniform types can be used (with their associated sokol-gfx enums):
    - float => SG_UNIFORMTYPE_FLOAT
    - vec2  => SG_UNIFORMTYPE_FLOAT2
    - vec3  => SG_UNIFORMTYPE_FLOAT3
    - vec4  => SG_UNIFORMTYPE_FLOAT4
    - int   => SG_UNIFORMTYPE_INT
    - ivec2 => SG_UNIFORMTYPE_INT2
    - ivec3 => SG_UNIFORMTYPE_INT3
    - ivec4 => SG_UNIFORMTYPE_INT4
    - mat4  => SG_UNIFORMTYPE_MAT4
- Alignment for those types must be as follows (in bytes):
    - float => 4
    - vec2  => 8
    - vec3  => 16
    - vec4  => 16
    - int   => 4
    - ivec2 => 8
    - ivec3 => 16
    - ivec4 => 16
    - mat4  => 16
- Arrays are only allowed for the following types: vec4, ivec4, mat4.

Note that the HLSL cbuffer layout rules are slightly different from the
std140 layout rules, this means that the cbuffer declarations in HLSL code
must be tweaked so that the layout is compatible with std140.
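To illustrate the std140 alignment pitfalls, here's a sketch of a CPU-side
struct for a uniform block containing a vec3 (the member names are made up
for this example):

    // GLSL: vec3 light_dir; float intensity; vec2 uv_offset;
    typedef struct {
        float light_dir[3]; // vec3 aligns to 16 bytes...
        float intensity;    // ...but a following scalar may pack into its 4th slot
        float uv_offset[2]; // vec2 aligns to 8 bytes (offset 16 here)
        float _pad[2];      // pad the struct size to a multiple of 16
    } fs_params_t;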
By far the easiest way to tackle the common uniform block layout problem is
to use the sokol-shdc shader cross-compiler tool!
ON STORAGE BUFFERS
==================
The two main purposes of storage buffers are:

- to be populated by compute shaders with dynamically generated data
- to provide random-access data to all shader stages

Storage buffers can be used to pass large amounts of random-access structured
data from the CPU side to the shaders. They are similar to data textures, but are
more convenient to use both on the CPU and shader side since they can be accessed
in shaders as a 1-dimensional array of struct items.

Storage buffers are *NOT* supported on the following platform/backend combos:

- macOS+GL (because storage buffers require GL 4.3, while macOS only goes up to GL 4.1)
- platforms which only support a GLES3.0 context (WebGL2 and iOS)

To use storage buffers, the following steps are required:

- write a shader which uses storage buffers (vertex- and fragment-shaders
  can only read from storage buffers, while compute-shaders can both read
  and write storage buffers)
- create one or more storage buffers via sg_make_buffer() with
  `.usage.storage_buffer = true`
- when creating a shader via sg_make_shader(), populate the sg_shader_desc
  struct with binding info (when using sokol-shdc, this step will be taken care
  of automatically):
    - which storage buffer bind slots on the vertex-, fragment- or compute-stage
      are occupied
    - whether the storage buffer on that bind slot is readonly (readonly
      bindings are required for vertex- and fragment-shaders, and in compute
      shaders the readonly flag is used to control hazard tracking in some
      3D backends)
- when calling sg_apply_bindings(), apply the matching bind slots with the previously
  created storage buffers (see the sketch after this list)
- ...and that's it.
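A rough sketch of those steps in code (hedged: this assumes storage buffers
are bound through sg_view objects like the storage images described further
below, and that bind slot 0 matches the shader's `layout(binding=0)`; the
exact binding details may differ):

    // an immutable storage buffer with initial content:
    sg_buffer sbuf = sg_make_buffer(&(sg_buffer_desc){
        .usage.storage_buffer = true,
        .data = SG_RANGE(vertices),
    });
    // a storage-buffer view used for binding:
    sg_view sbuf_view = sg_make_view(&(sg_view_desc){
        .storage_buffer.buffer = sbuf,
    });
    // ...later, inside a pass:
    sg_apply_bindings(&(sg_bindings){
        .views[0] = sbuf_view,
    });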
For more details, see the following backend-agnostic sokol samples:

- simple vertex pulling from a storage buffer:
    - C code: https://github.com/floooh/sokol-samples/blob/master/sapp/vertexpull-sapp.c
    - shader: https://github.com/floooh/sokol-samples/blob/master/sapp/vertexpull-sapp.glsl
- instanced rendering via storage buffers (vertex- and instance-pulling):
    - C code: https://github.com/floooh/sokol-samples/blob/master/sapp/instancing-pull-sapp.c
    - shader: https://github.com/floooh/sokol-samples/blob/master/sapp/instancing-pull-sapp.glsl
- storage buffers both on the vertex- and fragment-stage:
    - C code: https://github.com/floooh/sokol-samples/blob/master/sapp/sbuftex-sapp.c
    - shader: https://github.com/floooh/sokol-samples/blob/master/sapp/sbuftex-sapp.glsl
- the Ozz animation sample rewritten to pull all rendering data from storage buffers:
    - C code: https://github.com/floooh/sokol-samples/blob/master/sapp/ozz-storagebuffer-sapp.cc
    - shader: https://github.com/floooh/sokol-samples/blob/master/sapp/ozz-storagebuffer-sapp.glsl
- the instancing sample modified to use compute shaders:
    - C code: https://github.com/floooh/sokol-samples/blob/master/sapp/instancing-compute-sapp.c
    - shader: https://github.com/floooh/sokol-samples/blob/master/sapp/instancing-compute-sapp.glsl
- the Compute Boids sample ported to sokol-gfx:
    - C code: https://github.com/floooh/sokol-samples/blob/master/sapp/computeboids-sapp.c
    - shader: https://github.com/floooh/sokol-samples/blob/master/sapp/computeboids-sapp.glsl

...also see the following backend-specific vertex pulling samples (those also don't use sokol-shdc):

- D3D11: https://github.com/floooh/sokol-samples/blob/master/d3d11/vertexpulling-d3d11.c
- desktop GL: https://github.com/floooh/sokol-samples/blob/master/glfw/vertexpulling-glfw.c
- Metal: https://github.com/floooh/sokol-samples/blob/master/metal/vertexpulling-metal.c
- WebGPU: https://github.com/floooh/sokol-samples/blob/master/wgpu/vertexpulling-wgpu.c

...and the backend-specific compute shader samples:

- D3D11: https://github.com/floooh/sokol-samples/blob/master/d3d11/instancing-compute-d3d11.c
- desktop GL: https://github.com/floooh/sokol-samples/blob/master/glfw/instancing-compute-glfw.c
- Metal: https://github.com/floooh/sokol-samples/blob/master/metal/instancing-compute-metal.c
- WebGPU: https://github.com/floooh/sokol-samples/blob/master/wgpu/instancing-compute-wgpu.c
Storage buffer shader authoring caveats when using sokol-shdc:

- declare a read-only storage buffer interface block with `layout(binding=N) readonly buffer [name] { ... }`
  (where 'N' is the index in `sg_bindings.storage_buffers[N]`)
- ...or a read/write storage buffer interface block with `layout(binding=N) buffer [name] { ... }`
- declare a struct which describes a single array item in the storage buffer interface block
- only put a single flexible array member into the storage buffer interface block
E.g. a complete example in 'sokol-shdc GLSL':

```glsl
@vs
// declare a struct:
struct sb_vertex {
    vec3 pos;
    vec4 color;
};

// declare a buffer interface block with a single flexible struct array:
layout(binding=0) readonly buffer vertices {
    sb_vertex vtx[];
};

// in the shader function, access the storage buffer like this:
void main() {
    vec3 pos = vtx[gl_VertexIndex].pos;
    ...
}
@end
```
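For reference, a matching CPU-side item struct needs explicit padding, since
under std430 rules a vec3 member is aligned to 16 bytes (a sketch, the
struct name is made up):

    typedef struct {
        float pos[3];
        float _pad0;        // std430: the following vec4 is 16-byte aligned
        float color[4];
    } sb_vertex_t;          // 32 bytes per array item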
In a compute shader you can read and write the same item in the same
storage buffer (but you'll have to be careful with random access since
many threads of the same compute function run in parallel):

    @cs
    struct sb_item {
        vec3 pos;
        vec3 vel;
    };

    layout(binding=0) buffer items_ssbo {
        sb_item items[];
    };

    layout(local_size_x=64, local_size_y=1, local_size_z=1) in;

    void main() {
        uint idx = gl_GlobalInvocationID.x;
        vec3 pos = items[idx].pos;
        ...
        items[idx].pos = pos;
    }
    @end
Backend-specific storage-buffer caveats (not relevant when using sokol-shdc):

D3D11:
- storage buffers are created as 'raw' Byte Address Buffers
  (https://learn.microsoft.com/en-us/windows/win32/direct3d11/overviews-direct3d-11-resources-intro#raw-views-of-buffers)
- in HLSL, use a ByteAddressBuffer for readonly access of the buffer content
  (https://learn.microsoft.com/en-us/windows/win32/direct3dhlsl/sm5-object-byteaddressbuffer)
- ...or a RWByteAddressBuffer for read/write access
  (https://learn.microsoft.com/en-us/windows/win32/direct3dhlsl/sm5-object-rwbyteaddressbuffer)
- readonly storage buffers and textures are both bound as 'shader-resource-view' and
  share the same bind slots (declared as `register(tN)` in HLSL, where N must be in the range 0..23)
- read/write storage buffers and storage images are bound as 'unordered-access-view'
  (declared as `register(uN)` in HLSL, where N is in the range 0..11)

Metal:
- in Metal there is no internal difference between vertex-, uniform- and
  storage-buffers, all are bound to the same 'buffer bind slots' with the
  following reserved ranges:
    - vertex shader stage:
        - uniform buffers: slots 0..7
        - storage buffers: slots 8..15
        - vertex buffers: slots 16..23
    - fragment shader stage:
        - uniform buffers: slots 0..7
        - storage buffers: slots 8..15
- this means that in MSL, storage buffer bindings start at [[buffer(8)]] both in
  the vertex and fragment stage

GL:
- the GL backend doesn't use name-lookup to find storage buffer bindings, this
  means you must annotate buffers with `layout(std430, binding=N)` in GLSL
- ...where N is 0..sg_limits.max_storage_buffer_bindings_per_stage

WebGPU:
- in WGSL, textures, samplers and storage buffers all use a shared
  bindspace across all shader stages on bindgroup 1:
  `@group(1) @binding(0..127)`
ON STORAGE IMAGES:
==================
To write pixel data to texture objects in compute shaders, first an image
object must be created with 'storage image usage':

    sg_image storage_image = sg_make_image(&(sg_image_desc){
        .usage = {
            .storage_image = true,
        },
        .width = ...,
        .height = ...,
        .pixel_format = ...,
    });

Next a storage-image-view object is required, which also allows picking
a specific mip-level or slice for the compute-shader to access:

    sg_view simg_view = sg_make_view(&(sg_view_desc){
        .storage_image = {
            .image = storage_image,
            .mip_level = ...,
            .slice = ...,
        },
    });

Finally 'bind' the storage-image-view via a regular sg_apply_bindings() call
inside a compute pass:

    sg_begin_pass(&(sg_pass){ .compute = true });
    sg_apply_pipeline(...);
    sg_apply_bindings(&(sg_bindings){
        .views[VIEW_simg] = simg_view,
    });
    sg_dispatch(...);
    sg_end_pass();

Currently, storage images can only be used with `readwrite` or `writeonly` access in
shaders. For readonly access use a regular texture binding instead.

For an example of using storage images in compute shaders see imageblur-sapp:

- C code: https://github.com/floooh/sokol-samples/blob/master/sapp/imageblur-sapp.c
- shader: https://github.com/floooh/sokol-samples/blob/master/sapp/imageblur-sapp.glsl
TRACE HOOKS:
============
sokol_gfx.h optionally allows installing 'trace hook' callbacks for each
public API function. When a public API function is called, and a trace hook
callback has been installed for this function, the callback will be invoked
with the parameters and result of the function. This is useful for things
like debugging- and profiling-tools, or keeping track of resource creation
and destruction.

To use the trace hook feature:

--- Define SOKOL_TRACE_HOOKS before including the implementation.
--- Setup an sg_trace_hooks structure with your callback function
    pointers (keep all function pointers you're not interested
    in zero-initialized), optionally set the user_data member
    in the sg_trace_hooks struct.
--- Install the trace hooks by calling sg_install_trace_hooks(),
    the return value of this function is another sg_trace_hooks
    struct which contains the previous set of trace hooks.
    You should keep this struct around, and call those previous
    function pointers from your own trace callbacks for proper
    chaining.
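For instance, a minimal sketch which counts buffer creations and chains to
any previously installed hooks (assuming the .make_buffer callback matches
the sg_trace_hooks member signature):

    static int num_make_buffer;
    static sg_trace_hooks prev_hooks;

    static void trace_make_buffer(const sg_buffer_desc* desc, sg_buffer result, void* user_data) {
        num_make_buffer++;
        // chain to a previously installed hook:
        if (prev_hooks.make_buffer) {
            prev_hooks.make_buffer(desc, result, prev_hooks.user_data);
        }
    }
    ...
    prev_hooks = sg_install_trace_hooks(&(sg_trace_hooks){
        .make_buffer = trace_make_buffer,
    });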
As an example of how trace hooks are used, have a look at the
imgui/sokol_gfx_imgui.h header which implements a realtime
debugging UI for sokol_gfx.h on top of Dear ImGui.
MEMORY ALLOCATION OVERRIDE
==========================
You can override the memory allocation functions at initialization time
like this:

    void* my_alloc(size_t size, void* user_data) {
        return malloc(size);
    }

    void my_free(void* ptr, void* user_data) {
        free(ptr);
    }

    ...
    sg_setup(&(sg_desc){
        // ...
        .allocator = {
            .alloc_fn = my_alloc,
            .free_fn = my_free,
            .user_data = ...,
        }
    });
    ...

If no overrides are provided, malloc and free will be used.

This only affects memory allocation calls done by sokol_gfx.h
itself though, not any allocations in OS libraries.
ERROR REPORTING AND LOGGING
===========================
To get any logging information at all you need to provide a logging callback
in the setup call; the easiest way is to use sokol_log.h:

    #include "sokol_log.h"

    sg_setup(&(sg_desc){ .logger.func = slog_func });

To override logging with your own callback, first write a logging function like this:

    void my_log(const char* tag,                // e.g. 'sg'
                uint32_t log_level,             // 0=panic, 1=error, 2=warn, 3=info
                uint32_t log_item_id,           // SG_LOGITEM_*
                const char* message_or_null,    // a message string, may be nullptr in release mode
                uint32_t line_nr,               // line number in sokol_gfx.h
                const char* filename_or_null,   // source filename, may be nullptr in release mode
                void* user_data)
    {
        ...
    }

...and then setup sokol-gfx like this:

    sg_setup(&(sg_desc){
        .logger = {
            .func = my_log,
            .user_data = my_user_data,
        }
    });

The provided logging function must be reentrant (e.g. be callable from
different threads).

If you don't want to provide your own custom logger it is highly recommended to use
the standard logger in sokol_log.h instead, otherwise you won't see any warnings or
errors.
COMMIT LISTENERS
================
It's possible to hook callback functions into sokol-gfx which are called from
inside sg_commit() in unspecified order. This is mainly useful for libraries
that build on top of sokol_gfx.h to be notified about the end/start of a frame.

To add a commit listener, call:

    static void my_commit_listener(void* user_data) {
        ...
    }

    bool success = sg_add_commit_listener((sg_commit_listener){
        .func = my_commit_listener,
        .user_data = ...,
    });

The function returns false if the internal array of commit listeners is full,
or the same commit listener had already been added.

If the function returns true, my_commit_listener() will be called each frame
from inside sg_commit().

By default, 1024 distinct commit listeners can be added, but this number
can be tweaked in the sg_setup() call:

    sg_setup(&(sg_desc){
        .max_commit_listeners = 2048,
    });

An sg_commit_listener item is equal to another if both the function
pointer and user_data field are equal.

To remove a commit listener:

    bool success = sg_remove_commit_listener((sg_commit_listener){
        .func = my_commit_listener,
        .user_data = ...,
    });

...where the .func and .user_data fields are equal to those of a previous
sg_add_commit_listener() call. The function returns true if the commit
listener item was found and removed, and false otherwise.
RESOURCE CREATION AND DESTRUCTION IN DETAIL
===========================================
The 'vanilla' way to create resource objects is with the 'make functions':

    sg_buffer sg_make_buffer(const sg_buffer_desc* desc)
    sg_image sg_make_image(const sg_image_desc* desc)
    sg_sampler sg_make_sampler(const sg_sampler_desc* desc)
    sg_shader sg_make_shader(const sg_shader_desc* desc)
    sg_pipeline sg_make_pipeline(const sg_pipeline_desc* desc)
    sg_view sg_make_view(const sg_view_desc* desc)

This will result in one of three cases:

1. The returned handle is invalid. This happens when there are no more
   free slots in the resource pool for this resource type. An invalid
   handle is associated with the INVALID resource state, for instance:

        sg_buffer buf = sg_make_buffer(...);
        if (sg_query_buffer_state(buf) == SG_RESOURCESTATE_INVALID) {
            // buffer pool is exhausted
        }

2. The returned handle is valid, but creating the underlying resource
   has failed for some reason. This results in a resource object in the
   FAILED state. The reason *why* resource creation has failed differs
   by resource type. Look for log messages with more details. A failed
   resource state can be checked with:

        sg_buffer buf = sg_make_buffer(...);
        if (sg_query_buffer_state(buf) == SG_RESOURCESTATE_FAILED) {
            // creating the resource has failed
        }

3. And finally, if everything goes right, the returned resource is
   in resource state VALID and ready to use. This can be checked
   with:

        sg_buffer buf = sg_make_buffer(...);
        if (sg_query_buffer_state(buf) == SG_RESOURCESTATE_VALID) {
            // creating the resource has succeeded
        }
When calling the 'make functions', the created resource goes through a number
of states:

- INITIAL: the resource slot associated with the new resource is currently
  free (technically, there is no resource yet, just an empty pool slot)
- ALLOC: a handle for the new resource has been allocated, this just means
  a pool slot has been reserved
- VALID or FAILED: in VALID state any 3D API backend resource objects have
  been successfully created, otherwise if anything went wrong, the resource
  will be in FAILED state

Sometimes it makes sense to first grab a handle, but initialize the
underlying resource at a later time. For instance when loading data
asynchronously from a slow data source, you may know what buffers and
textures are needed at an early stage of the loading process, but actually
loading the buffer or texture content can only be completed at a later time.

For such situations, sokol-gfx resource objects can be created in two steps.
You can allocate a handle upfront with one of the 'alloc functions':

    sg_buffer sg_alloc_buffer(void)
    sg_image sg_alloc_image(void)
    sg_sampler sg_alloc_sampler(void)
    sg_shader sg_alloc_shader(void)
    sg_pipeline sg_alloc_pipeline(void)
    sg_view sg_alloc_view(void)

This will return a handle with the underlying resource object in the
ALLOC state:

    sg_image img = sg_alloc_image();
    if (sg_query_image_state(img) == SG_RESOURCESTATE_ALLOC) {
        // allocating an image handle has succeeded, otherwise
        // the image pool is full
    }

Such an 'incomplete' handle can be used in most sokol-gfx rendering functions
without doing any harm, sokol-gfx will simply skip any rendering operation
that involves resources which are not in VALID state.

At a later time (for instance once the texture has completed loading
asynchronously), the resource creation can be completed by calling one of
the 'init functions', those functions take an existing resource handle and
'desc struct':

    void sg_init_buffer(sg_buffer buf, const sg_buffer_desc* desc)
    void sg_init_image(sg_image img, const sg_image_desc* desc)
    void sg_init_sampler(sg_sampler smp, const sg_sampler_desc* desc)
    void sg_init_shader(sg_shader shd, const sg_shader_desc* desc)
    void sg_init_pipeline(sg_pipeline pip, const sg_pipeline_desc* desc)
    void sg_init_view(sg_view view, const sg_view_desc* desc)

The init functions expect a resource in ALLOC state, and after the function
returns, the resource will be either in VALID or FAILED state. Calling
an 'alloc function' followed by the matching 'init function' is fully
equivalent to calling the 'make function' alone.
Destruction can also happen as a two-step process. The 'uninit functions'
will put a resource object from the VALID or FAILED state back into the
ALLOC state:

    void sg_uninit_buffer(sg_buffer buf)
    void sg_uninit_image(sg_image img)
    void sg_uninit_sampler(sg_sampler smp)
    void sg_uninit_shader(sg_shader shd)
    void sg_uninit_pipeline(sg_pipeline pip)
    void sg_uninit_view(sg_view view)

Calling the 'uninit functions' with a resource that is not in the VALID or
FAILED state is a no-op.

To finally free the pool slot for recycling call the 'dealloc functions':

    void sg_dealloc_buffer(sg_buffer buf)
    void sg_dealloc_image(sg_image img)
    void sg_dealloc_sampler(sg_sampler smp)
    void sg_dealloc_shader(sg_shader shd)
    void sg_dealloc_pipeline(sg_pipeline pip)
    void sg_dealloc_view(sg_view view)

Calling the 'dealloc functions' on a resource that's not in ALLOC state is
a no-op, but will generate a warning log message.

Calling an 'uninit function' and 'dealloc function' in sequence is equivalent
to calling the associated 'destroy function':

    void sg_destroy_buffer(sg_buffer buf)
    void sg_destroy_image(sg_image img)
    void sg_destroy_sampler(sg_sampler smp)
    void sg_destroy_shader(sg_shader shd)
    void sg_destroy_pipeline(sg_pipeline pip)
    void sg_destroy_view(sg_view view)

The 'destroy functions' can be called on resources in any state and generally
do the right thing (for instance if the resource is in ALLOC state, the destroy
function will be equivalent to the 'dealloc function' and skip the 'uninit part').

And finally to close the circle, the 'fail functions' can be called to manually
put a resource in ALLOC state into the FAILED state:

    sg_fail_buffer(sg_buffer buf)
    sg_fail_image(sg_image img)
    sg_fail_sampler(sg_sampler smp)
    sg_fail_shader(sg_shader shd)
    sg_fail_pipeline(sg_pipeline pip)
    sg_fail_view(sg_view view)

This is recommended if anything went wrong outside of sokol-gfx during asynchronous
resource setup (for instance a file loading operation failed). In this case,
the 'fail function' should be called instead of the 'init function'.

Calling a 'fail function' on a resource that's not in ALLOC state is a no-op,
but will generate a warning log message.
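Putting it all together, a typical async-loading flow looks like this
(my_start_loading and my_load_callback are hypothetical application
functions, not part of sokol-gfx):

    sg_image img = sg_alloc_image();    // img handle is now in ALLOC state
    my_start_loading("texture.png", my_load_callback, img);
    ...
    static void my_load_callback(bool succeeded, const sg_image_desc* desc, sg_image img) {
        if (succeeded) {
            sg_init_image(img, desc);   // ALLOC => VALID (or FAILED)
        } else {
            sg_fail_image(img);         // ALLOC => FAILED
        }
    }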
NOTE that two-step resource creation usually only makes sense for buffers,
images and views, but not for samplers, shaders or pipelines. Most notably, trying
to create a pipeline object with a shader that's not in VALID state will
trigger a validation layer error, or if the validation layer is disabled,
result in a pipeline object in FAILED state.
WEBGPU CAVEATS
==============
For a general overview and design notes of the WebGPU backend see:

    https://floooh.github.io/2023/10/16/sokol-webgpu.html

In general, don't expect an automatic speedup when switching from the WebGL2
backend to the WebGPU backend. Some WebGPU functions currently actually
have a higher CPU overhead than similar WebGL2 functions, leading to the
paradoxical situation that some WebGPU code may be slower than similar WebGL2
code.

- When writing WGSL shader code by hand, a specific bind-slot convention
  must be used:

  All uniform block structs must use `@group(0)`, with bindings in the
  range 0..15:

      @group(0) @binding(0..15)

  All textures, samplers, storage-buffers and storage-images must use
  `@group(1)`, with bindings in the range 0..127:

      @group(1) @binding(0..127)

  Note that the number of texture, sampler, storage-buffer and storage-image
  bindings is still limited despite the large bind range:

    - up to 16 textures and samplers across all shader stages
    - up to 8 storage buffers across all shader stages
    - up to 4 storage images on the compute shader stage

  If you use sokol-shdc to generate WGSL shader code, you don't need to worry
  about the above binding conventions since sokol-shdc will allocate
  the WGSL bindslots.
- The sokol-gfx WebGPU backend uses the sg_desc.uniform_buffer_size item
  to allocate a single per-frame uniform buffer which must be big enough
  to hold all data written by sg_apply_uniforms() during a single frame,
  including a worst-case 256-byte alignment (e.g. each sg_apply_uniforms()
  call will cost at least 256 bytes of uniform buffer size). The default size
  is 4 MB, which is enough for 16384 sg_apply_uniforms() calls per
  frame (assuming the uniform data 'payload' is less than 256 bytes
  per call). These rules are the same as for the Metal backend, so if
  you are already using the Metal backend you'll be fine.
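  If a frame needs more uniform data than that, bump the buffer size in the
  setup call (a sketch, the 8 MB value is just an example):

      sg_setup(&(sg_desc){
          .uniform_buffer_size = 8 * 1024 * 1024,
          // ...
      });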
- sg_apply_bindings(): the sokol-gfx WebGPU backend implements a bindgroups
  cache to prevent excessive creation and destruction of BindGroup objects
  when calling sg_apply_bindings(). The number of slots in the bindgroups
  cache is defined in sg_desc.wgpu.bindgroups_cache_size when calling
  sg_setup(). The cache size must be a power-of-2 number, with the default being
  1024. The bindgroups cache behaviour can be observed by calling the
  function sg_query_frame_stats(), where the following struct items are
  of interest:

      .wgpu.num_bindgroup_cache_hits
      .wgpu.num_bindgroup_cache_misses
      .wgpu.num_bindgroup_cache_collisions
      .wgpu.num_bindgroup_cache_invalidates
      .wgpu.num_bindgroup_cache_vs_hash_key_mismatch

  The value to pay attention to is `.wgpu.num_bindgroup_cache_collisions`,
  if this number is consistently higher than a few percent of the
  `.wgpu.num_set_bindgroup` value, it might be a good idea to bump the
  bindgroups cache size to the next power-of-2.
- sg_apply_viewport(): WebGPU currently has a unique restriction that viewport
  rectangles must be contained entirely within the framebuffer. As a shitty
  workaround sokol_gfx.h will clip incoming viewport rectangles against
  the framebuffer, but this will distort the clipspace-to-screenspace mapping.
  There's no proper way to handle this inside sokol_gfx.h, this must be fixed
  in a future WebGPU update (see: https://github.com/gpuweb/gpuweb/issues/373
  and https://github.com/gpuweb/gpuweb/pull/5025).

- The sokol shader compiler generally adds `diagnostic(off, derivative_uniformity);`
  into the WGSL output. Currently only the Chrome WebGPU implementation seems
  to recognize this.

- The following sokol-gfx pixel formats are not supported in WebGPU:
  R16, R16SN, RG16, RG16SN, RGBA16, RGBA16SN.
  Unlike unsupported vertex formats, unsupported pixel formats can be queried
  in cross-backend code via sg_query_pixelformat() though.

- The Emscripten WebGPU shim currently doesn't support the Closure minification
  post-link-step (e.g. currently the emcc argument '--closure 1' or '--closure 2'
  will generate broken Javascript code).

- sokol-gfx requires the WebGPU device feature `depth32float-stencil8` to be enabled
  (this should be widely supported).

- sokol-gfx expects the WebGPU device feature `float32-filterable` to *not* be
  enabled (since this would exclude all iOS devices).
LICENSE
=======
zlib/libpng license

Copyright (c) 2018 Andre Weissflog

This software is provided 'as-is', without any express or implied warranty.
In no event will the authors be held liable for any damages arising from the
use of this software.

Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:

1. The origin of this software must not be misrepresented; you must not
   claim that you wrote the original software. If you use this software in a
   product, an acknowledgment in the product documentation would be
   appreciated but is not required.

2. Altered source versions must be plainly marked as such, and must not
   be misrepresented as being the original software.

3. This notice may not be removed or altered from any source
   distribution.
*/
#define SOKOL_GFX_INCLUDED (1)
#include <stddef.h> // size_t
#include <stdint.h>
#include <stdbool.h>

#if defined(SOKOL_API_DECL) && !defined(SOKOL_GFX_API_DECL)
#define SOKOL_GFX_API_DECL SOKOL_API_DECL
#endif
#ifndef SOKOL_GFX_API_DECL
#if defined(_WIN32) && defined(SOKOL_DLL) && defined(SOKOL_GFX_IMPL)
#define SOKOL_GFX_API_DECL __declspec(dllexport)
#elif defined(_WIN32) && defined(SOKOL_DLL)
#define SOKOL_GFX_API_DECL __declspec(dllimport)
#else
#define SOKOL_GFX_API_DECL extern
#endif
#endif

#ifdef __cplusplus
extern "C" {
#endif
/*
    Resource id typedefs:

    sg_buffer:      vertex- and index-buffers
    sg_image:       images used as textures and render-pass attachments
    sg_sampler:     sampler objects describing how a texture is sampled in a shader
    sg_shader:      vertex- and fragment-shaders and shader interface information
    sg_pipeline:    associated shader and vertex-layouts, and render states
    sg_view:        a resource view object used for bindings and render-pass attachments

    Instead of pointers, resource creation functions return a 32-bit
    handle which uniquely identifies the resource object.

    The 32-bit resource id is split into a 16-bit pool index in the lower bits,
    and a 16-bit 'generation counter' in the upper bits. The index allows fast
    pool lookups, and combined with the generation-counter it allows to detect
    'dangling accesses' (trying to use an object which no longer exists, and
    whose pool slot has been reused for a new object).

    The resource ids are wrapped into a strongly-typed struct so that
    trying to pass an incompatible resource id is a compile error.
*/
typedef struct sg_buffer   { uint32_t id; } sg_buffer;
typedef struct sg_image    { uint32_t id; } sg_image;
typedef struct sg_sampler  { uint32_t id; } sg_sampler;
typedef struct sg_shader   { uint32_t id; } sg_shader;
typedef struct sg_pipeline { uint32_t id; } sg_pipeline;
typedef struct sg_view     { uint32_t id; } sg_view;
/*
    sg_range is a pointer-size-pair struct used to pass memory blobs into
    sokol-gfx. When initialized from a value type (array or struct), you can
    use the SG_RANGE() macro to build an sg_range struct. For functions which
    take either an sg_range pointer, or a (C++) sg_range reference, use the
    SG_RANGE_REF macro as a solution which compiles both in C and C++.
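
    A typical usage sketch when creating a buffer from a value-type array:

        const float vertices[] = { 0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f };
        sg_buffer vbuf = sg_make_buffer(&(sg_buffer_desc){
            .data = SG_RANGE(vertices),
        });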
*/
typedef struct sg_range {
    const void* ptr;
    size_t size;
} sg_range;
// disabling this for every includer isn't great, but the warnings are also quite pointless
#if defined(_MSC_VER)
#pragma warning(disable:4221)   // /W4 only: nonstandard extension used: 'x': cannot be initialized using address of automatic variable 'y'
#pragma warning(disable:4204)   // VS2015: nonstandard extension used: non-constant aggregate initializer
#endif

#if defined(__cplusplus)
#define SG_RANGE(x) sg_range{ &x, sizeof(x) }
#define SG_RANGE_REF(x) sg_range{ &x, sizeof(x) }
#else
#define SG_RANGE(x) (sg_range){ &x, sizeof(x) }
#define SG_RANGE_REF(x) &(sg_range){ &x, sizeof(x) }
#endif
// various compile-time constants in the public API
enum {
    SG_INVALID_ID = 0,
    SG_NUM_INFLIGHT_FRAMES = 2,
    SG_MAX_COLOR_ATTACHMENTS = 8,
    SG_MAX_UNIFORMBLOCK_MEMBERS = 16,
    SG_MAX_VERTEX_ATTRIBUTES = 16,
    SG_MAX_MIPMAPS = 16,
    SG_MAX_VERTEXBUFFER_BINDSLOTS = 8,
    SG_MAX_UNIFORMBLOCK_BINDSLOTS = 8,
    SG_MAX_VIEW_BINDSLOTS = 32,
    SG_MAX_SAMPLER_BINDSLOTS = 12,
    SG_MAX_TEXTURE_SAMPLER_PAIRS = 32,  // same as SG_MAX_VIEW_BINDSLOTS
    SG_MAX_PORTABLE_COLOR_ATTACHMENTS = 4,
    SG_MAX_PORTABLE_TEXTURE_BINDINGS_PER_STAGE = 16,
    SG_MAX_PORTABLE_STORAGEBUFFER_BINDINGS_PER_STAGE = 8,   // assuming sg_features.compute = true
    SG_MAX_PORTABLE_STORAGEIMAGE_BINDINGS_PER_STAGE = 4,    // assuming sg_features.compute = true
};
/*
    sg_color

    An RGBA color value.
*/
typedef struct sg_color { float r, g, b, a; } sg_color;
/*
    sg_backend

    The active 3D-API backend, use the function sg_query_backend()
    to get the currently active backend.
*/
typedef enum sg_backend {
    SG_BACKEND_GLCORE,
    SG_BACKEND_GLES3,
    SG_BACKEND_D3D11,
    SG_BACKEND_METAL_IOS,
    SG_BACKEND_METAL_MACOS,
    SG_BACKEND_METAL_SIMULATOR,
    SG_BACKEND_WGPU,
    SG_BACKEND_VULKAN,
    SG_BACKEND_DUMMY,
} sg_backend;
/*
    sg_pixel_format

    sokol_gfx.h basically uses the same pixel formats as WebGPU, since these
    are supported on most newer GPUs.

    A pixelformat name consists of three parts:

    - components (R, RG, RGB or RGBA)
    - bit width per component (8, 16 or 32)
    - component data type:
        - unsigned normalized (no postfix)
        - signed normalized (SN postfix)
        - unsigned integer (UI postfix)
        - signed integer (SI postfix)
        - float (F postfix)

    Not all pixel formats can be used for everything, call sg_query_pixelformat()
    to inspect the capabilities of a given pixelformat. The function returns
    an sg_pixelformat_info struct with the following members:

    - sample: the pixelformat can be sampled as texture at least with
      nearest filtering
    - filter: the pixelformat can be sampled as texture with linear
      filtering
    - render: the pixelformat can be used as render-pass attachment
    - blend: blending is supported when used as render-pass attachment
    - msaa: multisample-antialiasing is supported when used
      as render-pass attachment
    - depth: the pixelformat can be used for depth-stencil attachments
    - compressed: this is a block-compressed format
    - bytes_per_pixel: the number of bytes in a pixel (0 for compressed formats)

    The default pixel format for texture images is SG_PIXELFORMAT_RGBA8.

    The default pixel format for render target images is platform-dependent
    and taken from the sg_environment struct passed into sg_setup(). Typically
    the default formats are:

    - for the Metal, D3D11 and WebGPU backends: SG_PIXELFORMAT_BGRA8
    - for GL backends: SG_PIXELFORMAT_RGBA8
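
    For instance, to decide at runtime whether a given format can be used
    as a render-pass attachment (a minimal sketch):

        if (sg_query_pixelformat(SG_PIXELFORMAT_RGBA32F).render) {
            // ...RGBA32F color attachments are supported
        }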
*/
typedef enum sg_pixel_format {
    _SG_PIXELFORMAT_DEFAULT,    // value 0 reserved for default-init
    SG_PIXELFORMAT_NONE,
    SG_PIXELFORMAT_R8,
    SG_PIXELFORMAT_R8SN,
    SG_PIXELFORMAT_R8UI,
    SG_PIXELFORMAT_R8SI,
    SG_PIXELFORMAT_R16,
    SG_PIXELFORMAT_R16SN,
    SG_PIXELFORMAT_R16UI,
    SG_PIXELFORMAT_R16SI,
    SG_PIXELFORMAT_R16F,
    SG_PIXELFORMAT_RG8,
    SG_PIXELFORMAT_RG8SN,
    SG_PIXELFORMAT_RG8UI,
    SG_PIXELFORMAT_RG8SI,
    SG_PIXELFORMAT_R32UI,
    SG_PIXELFORMAT_R32SI,
    SG_PIXELFORMAT_R32F,
    SG_PIXELFORMAT_RG16,
    SG_PIXELFORMAT_RG16SN,
    SG_PIXELFORMAT_RG16UI,
    SG_PIXELFORMAT_RG16SI,
    SG_PIXELFORMAT_RG16F,
    SG_PIXELFORMAT_RGBA8,
    SG_PIXELFORMAT_SRGB8A8,
    SG_PIXELFORMAT_RGBA8SN,
    SG_PIXELFORMAT_RGBA8UI,
    SG_PIXELFORMAT_RGBA8SI,
    SG_PIXELFORMAT_BGRA8,
    SG_PIXELFORMAT_RGB10A2,
    SG_PIXELFORMAT_RG11B10F,
    SG_PIXELFORMAT_RGB9E5,
    SG_PIXELFORMAT_RG32UI,
    SG_PIXELFORMAT_RG32SI,
    SG_PIXELFORMAT_RG32F,
    SG_PIXELFORMAT_RGBA16,
    SG_PIXELFORMAT_RGBA16SN,
    SG_PIXELFORMAT_RGBA16UI,
    SG_PIXELFORMAT_RGBA16SI,
    SG_PIXELFORMAT_RGBA16F,
    SG_PIXELFORMAT_RGBA32UI,
    SG_PIXELFORMAT_RGBA32SI,
    SG_PIXELFORMAT_RGBA32F,
    SG_PIXELFORMAT_DEPTH,
    SG_PIXELFORMAT_DEPTH_STENCIL,
    // NOTE: don't put any new compressed format in front of here
    SG_PIXELFORMAT_BC1_RGBA,
    SG_PIXELFORMAT_BC2_RGBA,
    SG_PIXELFORMAT_BC3_RGBA,
    SG_PIXELFORMAT_BC3_SRGBA,
    SG_PIXELFORMAT_BC4_R,
    SG_PIXELFORMAT_BC4_RSN,
    SG_PIXELFORMAT_BC5_RG,
    SG_PIXELFORMAT_BC5_RGSN,
    SG_PIXELFORMAT_BC6H_RGBF,
    SG_PIXELFORMAT_BC6H_RGBUF,
    SG_PIXELFORMAT_BC7_RGBA,
    SG_PIXELFORMAT_BC7_SRGBA,
    SG_PIXELFORMAT_ETC2_RGB8,
    SG_PIXELFORMAT_ETC2_SRGB8,
    SG_PIXELFORMAT_ETC2_RGB8A1,
    SG_PIXELFORMAT_ETC2_RGBA8,
    SG_PIXELFORMAT_ETC2_SRGB8A8,
    SG_PIXELFORMAT_EAC_R11,
    SG_PIXELFORMAT_EAC_R11SN,
    SG_PIXELFORMAT_EAC_RG11,
    SG_PIXELFORMAT_EAC_RG11SN,
    SG_PIXELFORMAT_ASTC_4x4_RGBA,
    SG_PIXELFORMAT_ASTC_4x4_SRGBA,
    _SG_PIXELFORMAT_NUM,
    _SG_PIXELFORMAT_FORCE_U32 = 0x7FFFFFFF
} sg_pixel_format;
/*
    Runtime information about a pixel format, returned by sg_query_pixelformat().
*/
typedef struct sg_pixelformat_info {
    bool sample;            // pixel format can be sampled in shaders at least with nearest filtering
    bool filter;            // pixel format can be sampled with linear filtering
    bool render;            // pixel format can be used as render-pass attachment
    bool blend;             // pixel format supports alpha-blending when used as render-pass attachment
    bool msaa;              // pixel format supports MSAA when used as render-pass attachment
    bool depth;             // pixel format is a depth format
    bool compressed;        // true if this is a hardware-compressed format
    bool read;              // true if format supports compute shader read access
    bool write;             // true if format supports compute shader write access
    int bytes_per_pixel;    // NOTE: this is 0 for compressed formats, use sg_query_row_pitch() / sg_query_surface_pitch() as alternative
} sg_pixelformat_info;
/*
    Runtime information about available optional features, returned by sg_query_features()
*/
typedef struct sg_features {
    bool origin_top_left;               // framebuffer- and texture-origin is in top left corner
    bool image_clamp_to_border;         // border color and clamp-to-border uv-wrap mode is supported
    bool mrt_independent_blend_state;   // multiple-render-target rendering can use per-render-target blend state
    bool mrt_independent_write_mask;    // multiple-render-target rendering can use per-render-target color write masks
    bool compute;                       // storage buffers and compute shaders are supported
    bool msaa_texture_bindings;         // if true, multisampled images can be bound as textures
    bool separate_buffer_types;         // cannot use the same buffer for vertices and indices (only WebGL2)
    bool draw_base_vertex;              // draw with (base vertex > 0) && (base_instance == 0) supported
    bool draw_base_instance;            // draw with (base instance > 0) supported
    bool gl_texture_views;              // supports 'proper' texture views (GL 4.3+)
} sg_features;
/*
    Runtime information about resource limits, returned by sg_query_limits()
*/
typedef struct sg_limits {
    int max_image_size_2d;                      // max width/height of SG_IMAGETYPE_2D images
    int max_image_size_cube;                    // max width/height of SG_IMAGETYPE_CUBE images
    int max_image_size_3d;                      // max width/height/depth of SG_IMAGETYPE_3D images
    int max_image_size_array;                   // max width/height of SG_IMAGETYPE_ARRAY images
    int max_image_array_layers;                 // max number of layers in SG_IMAGETYPE_ARRAY images
    int max_vertex_attrs;                       // max number of vertex attributes, clamped to SG_MAX_VERTEX_ATTRIBUTES
    int max_color_attachments;                  // max number of render pass color attachments, clamped to SG_MAX_COLOR_ATTACHMENTS
    int max_texture_bindings_per_stage;         // max number of texture bindings per shader stage, clamped to SG_MAX_VIEW_BINDSLOTS
    int max_storage_buffer_bindings_per_stage;  // max number of storage buffer bindings per shader stage, clamped to SG_MAX_VIEW_BINDSLOTS
    int max_storage_image_bindings_per_stage;   // max number of storage image bindings per shader stage, clamped to SG_MAX_VIEW_BINDSLOTS
    int gl_max_vertex_uniform_components;       // GL_MAX_VERTEX_UNIFORM_COMPONENTS (only on GL backends)
    int gl_max_combined_texture_image_units;    // GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS (only on GL backends)
    int d3d11_max_unordered_access_views;       // 8 on feature level 11.0, otherwise 32 (clamped to SG_MAX_VIEW_BINDSLOTS)
    int vk_min_uniform_buffer_offset_alignment;
} sg_limits;
/*
    sg_resource_state

    The current state of a resource in its resource pool.
    Resources start in the INITIAL state, which means the
    pool slot is unoccupied and can be allocated. When a resource is
    created, first an id is allocated, and the resource pool slot
    is set to state ALLOC. After allocation, the resource is
    initialized, which may result in the VALID or FAILED state. The
    reason why allocation and initialization are separate is because
    some resource types (e.g. buffers and images) might be asynchronously
    initialized by the user application. If an attempt is made to use a
    resource for rendering which is not in the VALID state, rendering
    operations will silently be dropped.

    The special INVALID state is returned in sg_query_xxx_state() if no
    resource object exists for the provided resource id.
*/
typedef enum sg_resource_state {
    SG_RESOURCESTATE_INITIAL,
    SG_RESOURCESTATE_ALLOC,
    SG_RESOURCESTATE_VALID,
    SG_RESOURCESTATE_FAILED,
    SG_RESOURCESTATE_INVALID,
    _SG_RESOURCESTATE_FORCE_U32 = 0x7FFFFFFF
} sg_resource_state;
/*
    sg_index_type

    Indicates whether indexed rendering (fetching vertex-indices from an
    index buffer) is used, and if yes, the index data type (16- or 32-bits).

    This is used in the sg_pipeline_desc.index_type member when creating a
    pipeline object.

    The default index type is SG_INDEXTYPE_NONE.
*/
typedef enum sg_index_type {
    _SG_INDEXTYPE_DEFAULT,  // value 0 reserved for default-init
    SG_INDEXTYPE_NONE,
    SG_INDEXTYPE_UINT16,
    SG_INDEXTYPE_UINT32,
    _SG_INDEXTYPE_NUM,
    _SG_INDEXTYPE_FORCE_U32 = 0x7FFFFFFF
} sg_index_type;
/*
    sg_image_type

    Indicates the basic type of an image object (2D-texture, cubemap,
    3D-texture or 2D-array-texture). Used in the sg_image_desc.type member when
    creating an image, and in sg_shader_image_desc to describe a sampled texture
    in the shader (both must match and will be checked in the validation layer
    when calling sg_apply_bindings).

    The default image type when creating an image is SG_IMAGETYPE_2D.
*/
typedef enum sg_image_type {
    _SG_IMAGETYPE_DEFAULT,  // value 0 reserved for default-init
    SG_IMAGETYPE_2D,
    SG_IMAGETYPE_CUBE,
    SG_IMAGETYPE_3D,
    SG_IMAGETYPE_ARRAY,
    _SG_IMAGETYPE_NUM,
    _SG_IMAGETYPE_FORCE_U32 = 0x7FFFFFFF
} sg_image_type;
/*
    sg_image_sample_type

    The basic data type of a texture sample as expected by a shader.
    Must be provided in sg_shader_image and used by the validation
    layer in sg_apply_bindings() to check if the provided image object
    is compatible with what the shader expects. Apart from the sokol-gfx
    validation layer, WebGPU is the only backend API which actually requires
    matching texture and sampler types to be provided upfront for validation
    (other 3D APIs treat texture/sampler type mismatches as undefined behaviour).

    NOTE that the following texture pixel formats require the use
    of SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT, combined with a sampler
    of type SG_SAMPLERTYPE_NONFILTERING:

    - SG_PIXELFORMAT_R32F
    - SG_PIXELFORMAT_RG32F
    - SG_PIXELFORMAT_RGBA32F

    (when using sokol-shdc, also check out the meta tags `@image_sample_type`
    and `@sampler_type`)
*/
typedef enum sg_image_sample_type {
    _SG_IMAGESAMPLETYPE_DEFAULT,    // value 0 reserved for default-init
    SG_IMAGESAMPLETYPE_FLOAT,
    SG_IMAGESAMPLETYPE_DEPTH,
    SG_IMAGESAMPLETYPE_SINT,
    SG_IMAGESAMPLETYPE_UINT,
    SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT,
    _SG_IMAGESAMPLETYPE_NUM,
    _SG_IMAGESAMPLETYPE_FORCE_U32 = 0x7FFFFFFF
} sg_image_sample_type;
/*
    sg_sampler_type

    The basic type of a texture sampler (sampling vs comparison) as
    defined in a shader. Must be provided in sg_shader_sampler_desc.

    sg_image_sample_type and sg_sampler_type for a texture/sampler
    pair must be compatible with each other, specifically only
    the following pairs are allowed:

    - SG_IMAGESAMPLETYPE_FLOAT => (SG_SAMPLERTYPE_FILTERING or SG_SAMPLERTYPE_NONFILTERING)
    - SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT => SG_SAMPLERTYPE_NONFILTERING
    - SG_IMAGESAMPLETYPE_SINT => SG_SAMPLERTYPE_NONFILTERING
    - SG_IMAGESAMPLETYPE_UINT => SG_SAMPLERTYPE_NONFILTERING
    - SG_IMAGESAMPLETYPE_DEPTH => SG_SAMPLERTYPE_COMPARISON
*/
typedef enum sg_sampler_type {
    _SG_SAMPLERTYPE_DEFAULT,    // value 0 reserved for default-init
    SG_SAMPLERTYPE_FILTERING,
    SG_SAMPLERTYPE_NONFILTERING,
    SG_SAMPLERTYPE_COMPARISON,
    _SG_SAMPLERTYPE_NUM,
    _SG_SAMPLERTYPE_FORCE_U32 = 0x7FFFFFFF,
} sg_sampler_type;
/*
    sg_primitive_type

    This is the common subset of 3D primitive types supported across all 3D
    APIs. This is used in the sg_pipeline_desc.primitive_type member when
    creating a pipeline object.

    The default primitive type is SG_PRIMITIVETYPE_TRIANGLES.
*/
typedef enum sg_primitive_type {
    _SG_PRIMITIVETYPE_DEFAULT,  // value 0 reserved for default-init
    SG_PRIMITIVETYPE_POINTS,
    SG_PRIMITIVETYPE_LINES,
    SG_PRIMITIVETYPE_LINE_STRIP,
    SG_PRIMITIVETYPE_TRIANGLES,
    SG_PRIMITIVETYPE_TRIANGLE_STRIP,
    _SG_PRIMITIVETYPE_NUM,
    _SG_PRIMITIVETYPE_FORCE_U32 = 0x7FFFFFFF
} sg_primitive_type;
/*
    sg_filter

    The filtering mode when sampling a texture image. This is
    used in the sg_sampler_desc.min_filter, sg_sampler_desc.mag_filter
    and sg_sampler_desc.mipmap_filter members when creating a sampler object.

    The default filter is SG_FILTER_NEAREST.
*/
typedef enum sg_filter {
    _SG_FILTER_DEFAULT, // value 0 reserved for default-init
    SG_FILTER_NEAREST,
    SG_FILTER_LINEAR,
    _SG_FILTER_NUM,
    _SG_FILTER_FORCE_U32 = 0x7FFFFFFF
} sg_filter;
/*
    sg_wrap

    The texture coordinates wrapping mode when sampling a texture
    image. This is used in the sg_sampler_desc.wrap_u, .wrap_v
    and .wrap_w members when creating a sampler object.

    The default wrap mode is SG_WRAP_REPEAT.

    NOTE: SG_WRAP_CLAMP_TO_BORDER is not supported on all backends
    and platforms. To check for support, call sg_query_features()
    and check the "image_clamp_to_border" boolean in the returned
    sg_features struct.

    Platforms which don't support SG_WRAP_CLAMP_TO_BORDER will silently fall back
    to SG_WRAP_CLAMP_TO_EDGE without a validation error.
*/
typedef enum sg_wrap {
    _SG_WRAP_DEFAULT,   // value 0 reserved for default-init
    SG_WRAP_REPEAT,
    SG_WRAP_CLAMP_TO_EDGE,
    SG_WRAP_CLAMP_TO_BORDER,
    SG_WRAP_MIRRORED_REPEAT,
    _SG_WRAP_NUM,
    _SG_WRAP_FORCE_U32 = 0x7FFFFFFF
} sg_wrap;
/*
    sg_border_color

    The border color to use when sampling a texture, and the UV wrap
    mode is SG_WRAP_CLAMP_TO_BORDER.

    The default border color is SG_BORDERCOLOR_OPAQUE_BLACK.
*/
typedef enum sg_border_color {
    _SG_BORDERCOLOR_DEFAULT,    // value 0 reserved for default-init
    SG_BORDERCOLOR_TRANSPARENT_BLACK,
    SG_BORDERCOLOR_OPAQUE_BLACK,
    SG_BORDERCOLOR_OPAQUE_WHITE,
    _SG_BORDERCOLOR_NUM,
    _SG_BORDERCOLOR_FORCE_U32 = 0x7FFFFFFF
} sg_border_color;
/*
    sg_vertex_format

    The data type of a vertex component. This is used to describe
    the layout of input vertex data when creating a pipeline object.

    NOTE that specific mapping rules exist from the CPU-side vertex
    formats to the vertex attribute base type in the vertex shader code
    (see doc header section 'ON VERTEX FORMATS').
*/
typedef enum sg_vertex_format {
    SG_VERTEXFORMAT_INVALID,
    SG_VERTEXFORMAT_FLOAT,
    SG_VERTEXFORMAT_FLOAT2,
    SG_VERTEXFORMAT_FLOAT3,
    SG_VERTEXFORMAT_FLOAT4,
    SG_VERTEXFORMAT_INT,
    SG_VERTEXFORMAT_INT2,
    SG_VERTEXFORMAT_INT3,
    SG_VERTEXFORMAT_INT4,
    SG_VERTEXFORMAT_UINT,
    SG_VERTEXFORMAT_UINT2,
    SG_VERTEXFORMAT_UINT3,
    SG_VERTEXFORMAT_UINT4,
    SG_VERTEXFORMAT_BYTE4,
    SG_VERTEXFORMAT_BYTE4N,
    SG_VERTEXFORMAT_UBYTE4,
    SG_VERTEXFORMAT_UBYTE4N,
    SG_VERTEXFORMAT_SHORT2,
    SG_VERTEXFORMAT_SHORT2N,
    SG_VERTEXFORMAT_USHORT2,
    SG_VERTEXFORMAT_USHORT2N,
    SG_VERTEXFORMAT_SHORT4,
    SG_VERTEXFORMAT_SHORT4N,
    SG_VERTEXFORMAT_USHORT4,
    SG_VERTEXFORMAT_USHORT4N,
    SG_VERTEXFORMAT_UINT10_N2,
    SG_VERTEXFORMAT_HALF2,
    SG_VERTEXFORMAT_HALF4,
    _SG_VERTEXFORMAT_NUM,
    _SG_VERTEXFORMAT_FORCE_U32 = 0x7FFFFFFF
} sg_vertex_format;
/*
    sg_vertex_step

    Defines whether the input pointer of a vertex input stream is advanced
    'per vertex' or 'per instance'. The default step-func is
    SG_VERTEXSTEP_PER_VERTEX. SG_VERTEXSTEP_PER_INSTANCE is used with
    instanced-rendering.

    The vertex-step is part of the vertex-layout definition
    when creating pipeline objects.
*/
typedef enum sg_vertex_step {
    _SG_VERTEXSTEP_DEFAULT,     // value 0 reserved for default-init
    SG_VERTEXSTEP_PER_VERTEX,
    SG_VERTEXSTEP_PER_INSTANCE,
    _SG_VERTEXSTEP_NUM,
    _SG_VERTEXSTEP_FORCE_U32 = 0x7FFFFFFF
} sg_vertex_step;
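/*
    For example, a non-normative sketch of a vertex layout for instanced
    rendering: vertex buffer slot 0 provides per-vertex data, slot 1
    per-instance data (the attribute formats and the 'shd' shader object
    are illustrative assumptions):

        sg_pipeline pip = sg_make_pipeline(&(sg_pipeline_desc){
            .shader = shd,
            .layout = {
                .buffers[1].step_func = SG_VERTEXSTEP_PER_INSTANCE,
                .attrs = {
                    [0] = { .buffer_index = 0, .format = SG_VERTEXFORMAT_FLOAT3 }, // per-vertex position
                    [1] = { .buffer_index = 1, .format = SG_VERTEXFORMAT_FLOAT3 }, // per-instance offset
                },
            },
        });
*/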
/*
    sg_uniform_type

    The data type of a uniform block member. This is used to
    describe the internal layout of uniform blocks when creating
    a shader object. This is only required for the GL backend, all
    other backends will ignore the interior layout of uniform blocks.
*/
typedef enum sg_uniform_type {
    SG_UNIFORMTYPE_INVALID,
    SG_UNIFORMTYPE_FLOAT,
    SG_UNIFORMTYPE_FLOAT2,
    SG_UNIFORMTYPE_FLOAT3,
    SG_UNIFORMTYPE_FLOAT4,
    SG_UNIFORMTYPE_INT,
    SG_UNIFORMTYPE_INT2,
    SG_UNIFORMTYPE_INT3,
    SG_UNIFORMTYPE_INT4,
    SG_UNIFORMTYPE_MAT4,
    _SG_UNIFORMTYPE_NUM,
    _SG_UNIFORMTYPE_FORCE_U32 = 0x7FFFFFFF
} sg_uniform_type;
/*
    sg_uniform_layout

    A hint for the interior memory layout of uniform blocks. This is
    only relevant for the GL backend where the internal layout
    of uniform blocks must be known to sokol-gfx. For all other backends the
    internal memory layout of uniform blocks doesn't matter, sokol-gfx
    will just pass uniform data as an opaque memory blob to the
    3D backend.

    SG_UNIFORMLAYOUT_NATIVE (default)
        Native layout means that a 'backend-native' memory layout
        is used. For the GL backend this means that uniforms
        are packed tightly in memory (e.g. there are no padding
        bytes).

    SG_UNIFORMLAYOUT_STD140
        The memory layout is a subset of std140. Arrays are only
        allowed for the FLOAT4, INT4 and MAT4 types. Alignment
        is as follows:

            FLOAT, INT:         4 byte alignment
            FLOAT2, INT2:       8 byte alignment
            FLOAT3, INT3:       16 byte alignment(!)
            FLOAT4, INT4:       16 byte alignment
            MAT4:               16 byte alignment
            FLOAT4[], INT4[]:   16 byte alignment

        The overall size of the uniform block must be a multiple
        of 16.

    For more information search for 'UNIFORM DATA LAYOUT' in the documentation block
    at the start of the header.
*/
typedef enum sg_uniform_layout {
    _SG_UNIFORMLAYOUT_DEFAULT,  // value 0 reserved for default-init
    SG_UNIFORMLAYOUT_NATIVE,    // default: layout depends on currently active backend
    SG_UNIFORMLAYOUT_STD140,    // std140: memory layout according to std140
    _SG_UNIFORMLAYOUT_NUM,
    _SG_UNIFORMLAYOUT_FORCE_U32 = 0x7FFFFFFF
} sg_uniform_layout;
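/*
    A non-normative sketch of a CPU-side struct matching SG_UNIFORMLAYOUT_STD140
    (member names are illustrative): explicit padding keeps the FLOAT3 member on
    a 16-byte boundary and the total size a multiple of 16:

        typedef struct vs_params_t {
            float mvp[16];      // MAT4: offset 0, 16-byte aligned
            float color[3];     // FLOAT3: offset 64, 16-byte aligned
            float _pad0;        // pad the FLOAT3 member up to 16 bytes
        } vs_params_t;          // total size: 80 bytes, a multiple of 16
*/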
/*
    sg_cull_mode

    The face-culling mode, this is used in the
    sg_pipeline_desc.cull_mode member when creating a
    pipeline object.

    The default cull mode is SG_CULLMODE_NONE.
*/
typedef enum sg_cull_mode {
    _SG_CULLMODE_DEFAULT,   // value 0 reserved for default-init
    SG_CULLMODE_NONE,
    SG_CULLMODE_FRONT,
    SG_CULLMODE_BACK,
    _SG_CULLMODE_NUM,
    _SG_CULLMODE_FORCE_U32 = 0x7FFFFFFF
} sg_cull_mode;
/*
    sg_face_winding

    The vertex-winding rule that determines a front-facing primitive. This
    is used in the member sg_pipeline_desc.face_winding
    when creating a pipeline object.

    The default winding is SG_FACEWINDING_CW (clockwise).
*/
typedef enum sg_face_winding {
    _SG_FACEWINDING_DEFAULT,    // value 0 reserved for default-init
    SG_FACEWINDING_CCW,
    SG_FACEWINDING_CW,
    _SG_FACEWINDING_NUM,
    _SG_FACEWINDING_FORCE_U32 = 0x7FFFFFFF
} sg_face_winding;
/*
    sg_compare_func

    The compare-function for configuring depth- and stencil-ref tests
    in pipeline objects, and for texture samplers which perform a
    comparison instead of a regular sampling operation.

    Used in the following structs:

    sg_pipeline_desc
        .depth
            .compare
        .stencil
            .front.compare
            .back.compare

    sg_sampler_desc
        .compare

    The default compare func for depth- and stencil-tests is
    SG_COMPAREFUNC_ALWAYS.

    The default compare func for samplers is SG_COMPAREFUNC_NEVER.
*/
typedef enum sg_compare_func {
    _SG_COMPAREFUNC_DEFAULT,    // value 0 reserved for default-init
    SG_COMPAREFUNC_NEVER,
    SG_COMPAREFUNC_LESS,
    SG_COMPAREFUNC_EQUAL,
    SG_COMPAREFUNC_LESS_EQUAL,
    SG_COMPAREFUNC_GREATER,
    SG_COMPAREFUNC_NOT_EQUAL,
    SG_COMPAREFUNC_GREATER_EQUAL,
    SG_COMPAREFUNC_ALWAYS,
    _SG_COMPAREFUNC_NUM,
    _SG_COMPAREFUNC_FORCE_U32 = 0x7FFFFFFF
} sg_compare_func;
/*
    sg_stencil_op

    The operation performed on a currently stored stencil-value when a
    comparison test passes or fails. This is used when creating a pipeline
    object in the following sg_pipeline_desc struct items:

    sg_pipeline_desc
        .stencil
            .front
                .fail_op
                .depth_fail_op
                .pass_op
            .back
                .fail_op
                .depth_fail_op
                .pass_op

    The default value is SG_STENCILOP_KEEP.
*/
typedef enum sg_stencil_op {
    _SG_STENCILOP_DEFAULT,  // value 0 reserved for default-init
    SG_STENCILOP_KEEP,
    SG_STENCILOP_ZERO,
    SG_STENCILOP_REPLACE,
    SG_STENCILOP_INCR_CLAMP,
    SG_STENCILOP_DECR_CLAMP,
    SG_STENCILOP_INVERT,
    SG_STENCILOP_INCR_WRAP,
    SG_STENCILOP_DECR_WRAP,
    _SG_STENCILOP_NUM,
    _SG_STENCILOP_FORCE_U32 = 0x7FFFFFFF
} sg_stencil_op;
/*
    sg_blend_factor

    The source and destination factors in blending operations.
    This is used in the following members when creating a pipeline object:

    sg_pipeline_desc
        .colors[i]
            .blend
                .src_factor_rgb
                .dst_factor_rgb
                .src_factor_alpha
                .dst_factor_alpha

    The default value is SG_BLENDFACTOR_ONE for source factors. For
    destination factors the default is SG_BLENDFACTOR_ZERO if the associated
    blend-op is ADD, SUBTRACT or REVERSE_SUBTRACT, and SG_BLENDFACTOR_ONE
    if the associated blend-op is MIN or MAX.
*/
typedef enum sg_blend_factor {
    _SG_BLENDFACTOR_DEFAULT,    // value 0 reserved for default-init
    SG_BLENDFACTOR_ZERO,
    SG_BLENDFACTOR_ONE,
    SG_BLENDFACTOR_SRC_COLOR,
    SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR,
    SG_BLENDFACTOR_SRC_ALPHA,
    SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA,
    SG_BLENDFACTOR_DST_COLOR,
    SG_BLENDFACTOR_ONE_MINUS_DST_COLOR,
    SG_BLENDFACTOR_DST_ALPHA,
    SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA,
    SG_BLENDFACTOR_SRC_ALPHA_SATURATED,
    SG_BLENDFACTOR_BLEND_COLOR,
    SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR,
    SG_BLENDFACTOR_BLEND_ALPHA,
    SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA,
    _SG_BLENDFACTOR_NUM,
    _SG_BLENDFACTOR_FORCE_U32 = 0x7FFFFFFF
} sg_blend_factor;
/*
    sg_blend_op

    Describes how the source and destination values are combined in the
    fragment blending operation. It is used in the following struct items
    when creating a pipeline object:

    sg_pipeline_desc
        .colors[i]
            .blend
                .op_rgb
                .op_alpha

    The default value is SG_BLENDOP_ADD.
*/
typedef enum sg_blend_op {
    _SG_BLENDOP_DEFAULT,    // value 0 reserved for default-init
    SG_BLENDOP_ADD,
    SG_BLENDOP_SUBTRACT,
    SG_BLENDOP_REVERSE_SUBTRACT,
    SG_BLENDOP_MIN,
    SG_BLENDOP_MAX,
    _SG_BLENDOP_NUM,
    _SG_BLENDOP_FORCE_U32 = 0x7FFFFFFF
} sg_blend_op;
/*
    sg_color_mask

    Selects the active color channels when writing a fragment color to the
    framebuffer. This is used in the members
    sg_pipeline_desc.colors[i].write_mask when creating a pipeline object.

    The default colormask is SG_COLORMASK_RGBA (write all color channels).

    NOTE: since the color mask value 0 is reserved for the default value
    (SG_COLORMASK_RGBA), use SG_COLORMASK_NONE if all color channels
    should be disabled.
*/
typedef enum sg_color_mask {
    _SG_COLORMASK_DEFAULT = 0,  // value 0 reserved for default-init
    SG_COLORMASK_NONE   = 0x10, // special value for 'all channels disabled'
    SG_COLORMASK_R      = 0x1,
    SG_COLORMASK_G      = 0x2,
    SG_COLORMASK_RG     = 0x3,
    SG_COLORMASK_B      = 0x4,
    SG_COLORMASK_RB     = 0x5,
    SG_COLORMASK_GB     = 0x6,
    SG_COLORMASK_RGB    = 0x7,
    SG_COLORMASK_A      = 0x8,
    SG_COLORMASK_RA     = 0x9,
    SG_COLORMASK_GA     = 0xA,
    SG_COLORMASK_RGA    = 0xB,
    SG_COLORMASK_BA     = 0xC,
    SG_COLORMASK_RBA    = 0xD,
    SG_COLORMASK_GBA    = 0xE,
    SG_COLORMASK_RGBA   = 0xF,
    _SG_COLORMASK_FORCE_U32 = 0x7FFFFFFF
} sg_color_mask;
/*
    sg_load_action

    Defines the load action that should be performed at the start of a render pass:

    SG_LOADACTION_CLEAR:        clear the render target
    SG_LOADACTION_LOAD:         load the previous content of the render target
    SG_LOADACTION_DONTCARE:     leave the render target in an undefined state

    This is used in the sg_pass_action structure.

    The default load action for all pass attachments is SG_LOADACTION_CLEAR,
    with the values rgba = { 0.5f, 0.5f, 0.5f, 1.0f }, depth=1.0f and stencil=0.

    If you want to override the default behaviour, it is important to not
    only set the clear color, but the 'action' field as well (as long as this
    is _SG_LOADACTION_DEFAULT, the value fields will be ignored).
*/
typedef enum sg_load_action {
    _SG_LOADACTION_DEFAULT,
    SG_LOADACTION_CLEAR,
    SG_LOADACTION_LOAD,
    SG_LOADACTION_DONTCARE,
    _SG_LOADACTION_FORCE_U32 = 0x7FFFFFFF
} sg_load_action;
/*
    sg_store_action

    Defines the store action that should be performed at the end of a render pass:

    SG_STOREACTION_STORE:       store the rendered content to the color attachment image
    SG_STOREACTION_DONTCARE:    allows the GPU to discard the rendered content
*/
typedef enum sg_store_action {
    _SG_STOREACTION_DEFAULT,
    SG_STOREACTION_STORE,
    SG_STOREACTION_DONTCARE,
    _SG_STOREACTION_FORCE_U32 = 0x7FFFFFFF
} sg_store_action;
/*
    sg_pass_action

    The sg_pass_action struct defines the actions to be performed
    at the start and end of a render pass.

    - at the start of the pass: whether the render attachments should be cleared,
      loaded with their previous content, or start in an undefined state
    - for clear operations: the clear value (color, depth, or stencil values)
    - at the end of the pass: whether the rendering result should be
      stored back into the render attachment or discarded
*/
typedef struct sg_color_attachment_action {
    sg_load_action load_action;     // default: SG_LOADACTION_CLEAR
    sg_store_action store_action;   // default: SG_STOREACTION_STORE
    sg_color clear_value;           // default: { 0.5f, 0.5f, 0.5f, 1.0f }
} sg_color_attachment_action;

typedef struct sg_depth_attachment_action {
    sg_load_action load_action;     // default: SG_LOADACTION_CLEAR
    sg_store_action store_action;   // default: SG_STOREACTION_DONTCARE
    float clear_value;              // default: 1.0
} sg_depth_attachment_action;

typedef struct sg_stencil_attachment_action {
    sg_load_action load_action;     // default: SG_LOADACTION_CLEAR
    sg_store_action store_action;   // default: SG_STOREACTION_DONTCARE
    uint8_t clear_value;            // default: 0
} sg_stencil_attachment_action;

typedef struct sg_pass_action {
    sg_color_attachment_action colors[SG_MAX_COLOR_ATTACHMENTS];
    sg_depth_attachment_action depth;
    sg_stencil_attachment_action stencil;
} sg_pass_action;
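/*
    For example, a minimal sketch that clears the color attachment to
    black but keeps the default depth/stencil behaviour. Note that
    .load_action must be set explicitly here, since a zero-initialized
    _SG_LOADACTION_DEFAULT would cause the clear_value to be ignored:

        sg_pass_action action = {
            .colors[0] = {
                .load_action = SG_LOADACTION_CLEAR,
                .clear_value = { 0.0f, 0.0f, 0.0f, 1.0f },
            },
        };
*/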
/*
    sg_swapchain

    Used in sg_begin_pass() to provide details about an external swapchain
    (pixel formats, sample count and backend-API specific render surface objects).

    The following information must be provided:

    - the width and height of the swapchain surfaces in number of pixels
    - the pixel format of the render- and optional msaa-resolve-surface
    - the pixel format of the optional depth- or depth-stencil-surface
    - the MSAA sample count for the render and depth-stencil surface

    If the pixel formats and MSAA sample counts are left zero-initialized,
    their defaults are taken from the sg_environment struct provided in the
    sg_setup() call.

    The width and height *must* be > 0.

    Additionally the following backend API specific objects must be passed in
    as 'type erased' void pointers:

    GL:
        - on all GL backends, a GL framebuffer object must be provided. This
          can be zero for the default framebuffer.

    D3D11:
        - an ID3D11RenderTargetView for the rendering surface, without
          MSAA rendering this surface will also be displayed
        - an optional ID3D11DepthStencilView for the depth- or depth/stencil
          buffer surface
        - when MSAA rendering is used, another ID3D11RenderTargetView
          which serves as MSAA resolve target and will be displayed

    WebGPU (same as D3D11, except with different types):
        - a WGPUTextureView for the rendering surface, without
          MSAA rendering this surface will also be displayed
        - an optional WGPUTextureView for the depth- or depth/stencil
          buffer surface
        - when MSAA rendering is used, another WGPUTextureView
          which serves as MSAA resolve target and will be displayed

    Metal (NOTE that the roles of the provided surfaces are slightly different
    than on D3D11 or WebGPU in case of MSAA vs non-MSAA rendering):
        - a current CAMetalDrawable (NOT an MTLDrawable!) which will be presented.
          This will either be rendered to directly (if no MSAA is used), or serve
          as MSAA-resolve target.
        - an optional MTLTexture for the depth- or depth-stencil buffer
        - an optional multisampled MTLTexture which serves as intermediate
          rendering surface which will then be resolved into the
          CAMetalDrawable.

    NOTE that for Metal you must use an ObjC __bridge cast to
    properly tunnel the ObjC object id through a C void*, e.g.:

        swapchain.metal.current_drawable = (__bridge const void*) [mtkView currentDrawable];

    On all other backends you shouldn't need to mess with the reference count.

    It's a good practice to write a helper function which returns an initialized
    sg_swapchain struct, which can then be plugged directly into
    sg_pass.swapchain. Look at the function sglue_swapchain() in the sokol_glue.h
    header as an example.
*/
typedef struct sg_metal_swapchain {
    const void* current_drawable;       // CAMetalDrawable (NOT MTLDrawable!!!)
    const void* depth_stencil_texture;  // MTLTexture
    const void* msaa_color_texture;     // MTLTexture
} sg_metal_swapchain;

typedef struct sg_d3d11_swapchain {
    const void* render_view;            // ID3D11RenderTargetView
    const void* resolve_view;           // ID3D11RenderTargetView
    const void* depth_stencil_view;     // ID3D11DepthStencilView
} sg_d3d11_swapchain;

typedef struct sg_wgpu_swapchain {
    const void* render_view;            // WGPUTextureView
    const void* resolve_view;           // WGPUTextureView
    const void* depth_stencil_view;     // WGPUTextureView
} sg_wgpu_swapchain;

typedef struct sg_vulkan_swapchain {
    const void* render_image;               // VkImage
    const void* render_view;                // VkImageView
    const void* resolve_image;              // VkImage
    const void* resolve_view;               // VkImageView
    const void* depth_stencil_image;        // VkImage
    const void* depth_stencil_view;         // VkImageView
    const void* render_finished_semaphore;  // VkSemaphore
    const void* present_complete_semaphore; // VkSemaphore
} sg_vulkan_swapchain;

typedef struct sg_gl_swapchain {
    uint32_t framebuffer;               // GL framebuffer object
} sg_gl_swapchain;

typedef struct sg_swapchain {
    int width;
    int height;
    int sample_count;
    sg_pixel_format color_format;
    sg_pixel_format depth_format;
    sg_metal_swapchain metal;
    sg_d3d11_swapchain d3d11;
    sg_wgpu_swapchain wgpu;
    sg_vulkan_swapchain vulkan;
    sg_gl_swapchain gl;
} sg_swapchain;
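/*
    A minimal helper sketch for a GL default-framebuffer swapchain
    (my_width/my_height are assumed application-side values; pixel formats
    and sample count are left zero-initialized to fall back to the
    sg_environment defaults):

        sg_swapchain my_swapchain(void) {
            return (sg_swapchain){
                .width = my_width,
                .height = my_height,
                .gl.framebuffer = 0,    // 0 == default framebuffer
            };
        }
*/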
/*
    sg_attachments

    Used in sg_pass to provide render pass attachment views. Each
    type of pass attachment has its corresponding view type:

    sg_attachments.colors[]:
        populate with color-attachment views, e.g.:

            sg_make_view(&(sg_view_desc){
                .color_attachment = { ... },
            });

    sg_attachments.resolves[]:
        populate with resolve-attachment views, e.g.:

            sg_make_view(&(sg_view_desc){
                .resolve_attachment = { ... },
            });

    sg_attachments.depth_stencil:
        populate with depth-stencil-attachment views, e.g.:

            sg_make_view(&(sg_view_desc){
                .depth_stencil_attachment = { ... },
            });
*/
typedef struct sg_attachments {
    sg_view colors[SG_MAX_COLOR_ATTACHMENTS];
    sg_view resolves[SG_MAX_COLOR_ATTACHMENTS];
    sg_view depth_stencil;
} sg_attachments;
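/*
    For example, a sketch populating an sg_attachments struct for an
    offscreen pass with one color- and one depth-stencil-attachment
    (color_img and depth_img are assumed images created with the matching
    usage flags):

        sg_attachments atts = {
            .colors[0] = sg_make_view(&(sg_view_desc){
                .color_attachment.image = color_img,
            }),
            .depth_stencil = sg_make_view(&(sg_view_desc){
                .depth_stencil_attachment.image = depth_img,
            }),
        };
*/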
/*
    sg_pass

    The sg_pass structure is passed as argument into the sg_begin_pass()
    function.

    For a swapchain render pass, provide an sg_pass_action and sg_swapchain
    struct (for instance via the sglue_swapchain() helper function from
    sokol_glue.h):

        sg_begin_pass(&(sg_pass){
            .action = { ... },
            .swapchain = sglue_swapchain(),
        });

    For an offscreen render pass, provide an sg_pass_action struct and
    an sg_attachments struct with attachment view objects:

        sg_begin_pass(&(sg_pass){
            .action = { ... },
            .attachments = {
                .colors = { ... },
                .resolves = { ... },
                .depth_stencil = ...,
            },
        });

    You can also omit the .action object to get default pass action behaviour
    (clear to color=grey, depth=1 and stencil=0).

    For a compute pass, just set the sg_pass.compute boolean to true:

        sg_begin_pass(&(sg_pass){ .compute = true });
*/
typedef struct sg_pass {
    uint32_t _start_canary;
    bool compute;
    sg_pass_action action;
    sg_attachments attachments;
    sg_swapchain swapchain;
    const char* label;
    uint32_t _end_canary;
} sg_pass;
/*
    sg_bindings

    The sg_bindings structure defines the resource bindings for
    the next draw call.

    To update the resource bindings, call sg_apply_bindings() with
    a pointer to a populated sg_bindings struct. Note that
    sg_apply_bindings() must be called after sg_apply_pipeline()
    and that bindings are not preserved across sg_apply_pipeline()
    calls, even when the new pipeline uses the same 'bindings layout'.

    A resource binding struct contains:

    - 1..N vertex buffers
    - 1..N vertex buffer offsets
    - 0..1 index buffer
    - 0..1 index buffer offset
    - 0..N resource views (texture-, storage-image- and storage-buffer-views)
    - 0..N samplers

    Where 'N' is defined in the following constants:

    - SG_MAX_VERTEXBUFFER_BINDSLOTS
    - SG_MAX_VIEW_BINDSLOTS
    - SG_MAX_SAMPLER_BINDSLOTS

    Note that inside compute passes vertex- and index-buffer-bindings are
    disallowed.

    When using sokol-shdc for shader authoring, the `layout(binding=N)`
    for texture-, storage-image- and storage-buffer-bindings directly
    maps to the views-array index, for instance the following vertex-
    and fragment-shader interface for sokol-shdc:

        @vs vs
        layout(binding=0) uniform vs_params { ... };
        layout(binding=0) readonly buffer ssbo { ... };
        layout(binding=1) uniform texture2D vs_tex;
        layout(binding=0) uniform sampler vs_smp;
        ...
        @end

        @fs fs
        layout(binding=1) uniform fs_params { ... };
        layout(binding=2) uniform texture2D fs_tex;
        layout(binding=1) uniform sampler fs_smp;
        ...
        @end

    ...would map to the following sg_bindings struct:

        const sg_bindings bnd = {
            .vertex_buffers[0] = ...,
            .views[0] = ssbo_view,
            .views[1] = vs_tex_view,
            .views[2] = fs_tex_view,
            .samplers[0] = vs_smp,
            .samplers[1] = fs_smp,
        };

    ...alternatively you can use code-generated slot indices:

        const sg_bindings bnd = {
            .vertex_buffers[0] = ...,
            .views[VIEW_ssbo] = ssbo_view,
            .views[VIEW_vs_tex] = vs_tex_view,
            .views[VIEW_fs_tex] = fs_tex_view,
            .samplers[SMP_vs_smp] = vs_smp,
            .samplers[SMP_fs_smp] = fs_smp,
        };

    Resource bindslots for a specific shader/pipeline may have gaps, and an
    sg_bindings struct may have populated bind slots which are not used by a
    specific shader. This allows to use the same sg_bindings struct across
    different shader variants.

    When not using sokol-shdc, the bindslot indices in the sg_bindings
    struct need to match the per-binding reflection info slot indices
    in the sg_shader_desc struct (for details about that see the
    sg_shader_desc struct documentation).

    The optional buffer offsets can be used to put different unrelated
    chunks of vertex- and/or index-data into the same buffer objects.
*/
typedef struct sg_bindings {
    uint32_t _start_canary;
    sg_buffer vertex_buffers[SG_MAX_VERTEXBUFFER_BINDSLOTS];
    int vertex_buffer_offsets[SG_MAX_VERTEXBUFFER_BINDSLOTS];
    sg_buffer index_buffer;
    int index_buffer_offset;
    sg_view views[SG_MAX_VIEW_BINDSLOTS];
    sg_sampler samplers[SG_MAX_SAMPLER_BINDSLOTS];
    uint32_t _end_canary;
} sg_bindings;
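/*
    A typical frame-loop sketch showing where sg_apply_bindings() sits
    ('pip', 'bnd' and 'num_elements' are assumed application-side values):

        sg_apply_pipeline(pip);
        sg_apply_bindings(&bnd);
        sg_draw(0, num_elements, 1);
*/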
/*
    sg_buffer_usage

    Describes how a buffer object is going to be used:

    .vertex_buffer (default: true)
        the buffer will be bound as vertex buffer via sg_bindings.vertex_buffers[]
    .index_buffer (default: false)
        the buffer will be bound as index buffer via sg_bindings.index_buffer
    .storage_buffer (default: false)
        the buffer will be bound as storage buffer via a storage-buffer-view
        in sg_bindings.views[]
    .immutable (default: true)
        the buffer content will never be updated from the CPU side (but
        may be written to by a compute shader)
    .dynamic_update (default: false)
        the buffer content will be infrequently updated from the CPU side
    .stream_update (default: false)
        the buffer content will be updated each frame from the CPU side
*/
typedef struct sg_buffer_usage {
    bool vertex_buffer;
    bool index_buffer;
    bool storage_buffer;
    bool immutable;
    bool dynamic_update;
    bool stream_update;
} sg_buffer_usage;
/*
    sg_buffer_desc

    Creation parameters for sg_buffer objects, used in the sg_make_buffer() call.

    The default configuration is:

    .size:      0 (*must* be >0 for buffers without data)
    .usage      { .vertex_buffer = true, .immutable = true }
    .data.ptr   0 (*must* be valid for immutable buffers without storage buffer usage)
    .data.size  0 (*must* be > 0 for immutable buffers without storage buffer usage)
    .label      0 (optional string label)

    For immutable buffers which are initialized with initial data,
    keep the .size item zero-initialized, and set the size together with the
    pointer to the initial data in the .data item.

    For immutable or mutable buffers without initial data, keep the .data item
    zero-initialized, and set the buffer size in the .size item instead.

    You can also set both size values, but currently both size values must
    be identical (this may change in the future when the dynamic resource
    management may become more flexible).

    NOTE: Immutable buffers without storage-buffer-usage *must* be created
    with initial content, this restriction doesn't apply to storage buffer usage,
    because storage buffers may also get their initial content by running
    a compute shader on them.

    NOTE: Buffers without initial data will have undefined content, e.g.
    do *not* expect the buffer to be zero-initialized!

    ADVANCED TOPIC: Injecting native 3D-API buffers:

    The following struct members allow to inject your own GL, Metal
    or D3D11 buffers into sokol_gfx:

    .gl_buffers[SG_NUM_INFLIGHT_FRAMES]
    .mtl_buffers[SG_NUM_INFLIGHT_FRAMES]
    .d3d11_buffer

    You must still provide all other struct items except the .data item, and
    these must match the creation parameters of the native buffers you provide.
    For sg_buffer_desc.usage.immutable buffers, only provide a single native
    3D-API buffer, otherwise you need to provide SG_NUM_INFLIGHT_FRAMES buffers
    (only for GL and Metal, not D3D11). Providing multiple buffers for GL and
    Metal is necessary because sokol_gfx will rotate through them when calling
    sg_update_buffer() to prevent lock-stalls.

    Note that it is expected that immutable injected buffers have already been
    initialized with content, and the .data member must be 0!

    Also you need to call sg_reset_state_cache() after calling native 3D-API
    functions, and before calling any sokol_gfx function.
*/
typedef struct sg_buffer_desc {
    uint32_t _start_canary;
    size_t size;
    sg_buffer_usage usage;
    sg_range data;
    const char* label;
    // optionally inject backend-specific resources
    uint32_t gl_buffers[SG_NUM_INFLIGHT_FRAMES];
    const void* mtl_buffers[SG_NUM_INFLIGHT_FRAMES];
    const void* d3d11_buffer;
    const void* wgpu_buffer;
    uint32_t _end_canary;
} sg_buffer_desc;
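/*
    For example, a sketch creating an immutable vertex buffer from initial
    data, and a dynamically updated vertex buffer ('vertices' is an assumed
    application-side array, SG_RANGE is this header's range helper macro):

        // immutable vertex buffer, the size is taken from .data
        sg_buffer vbuf = sg_make_buffer(&(sg_buffer_desc){
            .data = SG_RANGE(vertices),
        });

        // dynamic vertex buffer without initial data
        sg_buffer dyn_vbuf = sg_make_buffer(&(sg_buffer_desc){
            .size = 64 * 1024,
            .usage = { .vertex_buffer = true, .dynamic_update = true },
        });
*/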
/*
    sg_image_usage

    Describes the intended usage of an image object:

    .storage_image (default: false)
        the image can be used as parent resource of a storage-image-view,
        which allows compute shaders to write to the image in a compute
        pass (for read-only access in compute shaders, bind the image
        via a texture view instead)
    .color_attachment (default: false)
        the image can be used as parent resource of a color-attachment-view,
        which is then passed into sg_begin_pass via sg_pass.attachments.colors[]
        so that fragment shaders can render into the image
    .resolve_attachment (default: false)
        the image can be used as parent resource of a resolve-attachment-view,
        which is then passed into sg_begin_pass via sg_pass.attachments.resolves[]
        as target for an MSAA-resolve operation in sg_end_pass()
    .depth_stencil_attachment (default: false)
        the image can be used as parent resource of a depth-stencil-attachment-view,
        which is then passed into sg_begin_pass via sg_pass.attachments.depth_stencil
        as depth-stencil-buffer
    .immutable (default: true)
        the image content cannot be updated from the CPU side
        (but may be updated by the GPU in a render- or compute-pass)
    .dynamic_update (default: false)
        the image content is updated infrequently by the CPU
    .stream_update (default: false)
        the image content is updated each frame by the CPU via sg_update_image()

    Note that creating a texture view from the image to be used for
    texture-sampling in vertex-, fragment- or compute-shaders
    is always implicitly allowed.
*/
typedef struct sg_image_usage {
    bool storage_image;
    bool color_attachment;
    bool resolve_attachment;
    bool depth_stencil_attachment;
    bool immutable;
    bool dynamic_update;
    bool stream_update;
} sg_image_usage;
/*
    sg_view_type

    Allows to query the type of a view object via the function sg_query_view_type().
*/
typedef enum sg_view_type {
    SG_VIEWTYPE_INVALID,
    SG_VIEWTYPE_STORAGEBUFFER,
    SG_VIEWTYPE_STORAGEIMAGE,
    SG_VIEWTYPE_TEXTURE,
    SG_VIEWTYPE_COLORATTACHMENT,
    SG_VIEWTYPE_RESOLVEATTACHMENT,
    SG_VIEWTYPE_DEPTHSTENCILATTACHMENT,
    _SG_VIEWTYPE_FORCE_U32 = 0x7FFFFFFF
} sg_view_type;
/*
    sg_image_data

    Defines the content of an image through an array of sg_range structs, each
    range pointing to the pixel data for one mip-level. For array-, cubemap- and
    3D-images each mip-level contains all slice-surfaces for that mip-level in a
    single tightly packed memory block.

    The size of a single surface in a mip-level for a regular 2D texture
    can be computed via:

        sg_query_surface_pitch(pixel_format, mip_width, mip_height, 1);

    For array- and 3D-images the size of a single mip-level is:

        num_slices * sg_query_surface_pitch(pixel_format, mip_width, mip_height, 1);

    For cubemap-images the size of a single mip-level is:

        6 * sg_query_surface_pitch(pixel_format, mip_width, mip_height, 1);

    The order of cubemap-faces in a mip-level data chunk is:

        [0] => +X
        [1] => -X
        [2] => +Y
        [3] => -Y
        [4] => +Z
        [5] => -Z
*/
typedef struct sg_image_data {
    sg_range mip_levels[SG_MAX_MIPMAPS];
} sg_image_data;
/*
    sg_image_desc

    Creation parameters for sg_image objects, used in the sg_make_image() call.

    The default configuration is:

    .type           SG_IMAGETYPE_2D
    .usage          .immutable = true
    .width          0 (must be set to >0)
    .height         0 (must be set to >0)
    .num_slices     1 (3D textures: depth; array textures: number of layers)
    .num_mipmaps    1
    .pixel_format   SG_PIXELFORMAT_RGBA8 for textures, or sg_desc.environment.defaults.color_format for render targets
    .sample_count   1 for textures, or sg_desc.environment.defaults.sample_count for render targets
    .data           an sg_image_data struct to define the initial content
    .label          0 (optional string label for trace hooks)

    Q: Why is the default sample_count for render targets identical with the
    "default sample count" from sg_desc.environment.defaults.sample_count?

    A: So that it matches the default sample count in pipeline objects. It is
    a bit strange/confusing that offscreen render targets by default get the
    same sample count as 'default swapchains', but it's better that an
    offscreen render target created with default parameters matches a
    pipeline object created with default parameters.

    NOTE:

    Regular images used as texture binding with usage.immutable must be fully
    initialized by providing a valid .data member which points to initialization
    data.

    Images with usage.*_attachment or usage.storage_image must
    *not* be created with initial content. Be aware that the initial
    content of pass attachment and storage images is undefined
    (not guaranteed to be zeroed).

    ADVANCED TOPIC: Injecting native 3D-API textures:

    The following struct members allow to inject your own GL, Metal or D3D11
    textures into sokol_gfx:

    .gl_textures[SG_NUM_INFLIGHT_FRAMES]
    .mtl_textures[SG_NUM_INFLIGHT_FRAMES]
    .d3d11_texture
    .wgpu_texture

    For GL, you can also specify the texture target or leave it empty to use
    the default texture target for the image type (GL_TEXTURE_2D for
    SG_IMAGETYPE_2D etc).

    The same rules apply as for injecting native buffers (see sg_buffer_desc
    documentation for more details).
*/
typedef struct sg_image_desc {
    uint32_t _start_canary;
    sg_image_type type;
    sg_image_usage usage;
    int width;
    int height;
    int num_slices;
    int num_mipmaps;
    sg_pixel_format pixel_format;
    int sample_count;
    sg_image_data data;
    const char* label;
    // optionally inject backend-specific resources
    uint32_t gl_textures[SG_NUM_INFLIGHT_FRAMES];
    uint32_t gl_texture_target;
    const void* mtl_textures[SG_NUM_INFLIGHT_FRAMES];
    const void* d3d11_texture;
    const void* wgpu_texture;
    uint32_t _end_canary;
} sg_image_desc;
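/*
    For example, a sketch creating a small immutable 2D texture with a
    single mip level ('pixels' is an assumed 4x4 RGBA8 pixel array):

        sg_image img = sg_make_image(&(sg_image_desc){
            .width = 4,
            .height = 4,
            .pixel_format = SG_PIXELFORMAT_RGBA8,
            .data.mip_levels[0] = SG_RANGE(pixels),
        });
*/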
/*
    sg_sampler_desc

    Creation parameters for sg_sampler objects, used in the sg_make_sampler() call.

    .min_filter:        SG_FILTER_NEAREST
    .mag_filter:        SG_FILTER_NEAREST
    .mipmap_filter:     SG_FILTER_NEAREST
    .wrap_u:            SG_WRAP_REPEAT
    .wrap_v:            SG_WRAP_REPEAT
    .wrap_w:            SG_WRAP_REPEAT (only SG_IMAGETYPE_3D)
    .min_lod:           0.0f
    .max_lod:           FLT_MAX
    .border_color:      SG_BORDERCOLOR_OPAQUE_BLACK
    .compare:           SG_COMPAREFUNC_NEVER
    .max_anisotropy:    1 (must be 1..16)
*/
typedef struct sg_sampler_desc {
    uint32_t _start_canary;
    sg_filter min_filter;
    sg_filter mag_filter;
    sg_filter mipmap_filter;
    sg_wrap wrap_u;
    sg_wrap wrap_v;
    sg_wrap wrap_w;
    float min_lod;
    float max_lod;
    sg_border_color border_color;
    sg_compare_func compare;
    uint32_t max_anisotropy;
    const char* label;
    // optionally inject backend-specific resources
    uint32_t gl_sampler;
    const void* mtl_sampler;
    const void* d3d11_sampler;
    const void* wgpu_sampler;
    uint32_t _end_canary;
} sg_sampler_desc;
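/*
    For example, a sketch creating a sampler with bilinear filtering and
    repeat wrapping; all omitted members take the documented defaults:

        sg_sampler smp = sg_make_sampler(&(sg_sampler_desc){
            .min_filter = SG_FILTER_LINEAR,
            .mag_filter = SG_FILTER_LINEAR,
            .label = "linear-sampler",
        });
*/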
/*
    sg_shader_desc

    Used as parameter of sg_make_shader() to create a shader object which
    communicates shader source or bytecode and shader interface
    reflection information to sokol-gfx.

    If you use sokol-shdc you can ignore the following information since
    the sg_shader_desc struct will be code-generated.

    Otherwise you need to provide the following information to the
    sg_make_shader() call:

    - a vertex- and fragment-shader function:
        - the shader source or bytecode
        - an optional entry point name
        - for D3D11: an optional compile target when source code is provided
          (the defaults are "vs_4_0" and "ps_4_0")

    - ...or alternatively, a compute function:
        - the shader source or bytecode
        - an optional entry point name
        - for D3D11: an optional compile target when source code is provided
          (the default is "cs_5_0")

    - vertex attributes required by some backends (not for compute shaders):
        - the vertex attribute base type (undefined, float, signed int, unsigned int),
          this information is only used in the validation layer to check that the
          pipeline object vertex formats are compatible with the input vertex attribute
          type used in the vertex shader. NOTE that the default base type
          'undefined' skips the validation layer check.
        - for the GL backend: optional vertex attribute names used for name lookup
        - for the D3D11 backend: semantic names and indices

    - only for compute shaders on the Metal backend:
        - the workgroup size aka 'threads per thread-group'

          In other 3D APIs this is declared in the shader code:
            - GLSL: `layout(local_size_x=x, local_size_y=y, local_size_z=z) in;`
            - HLSL: `[numthreads(x, y, z)]`
            - WGSL: `@workgroup_size(x, y, z)`

          ...but in Metal the workgroup size is declared on the CPU side

    - reflection information for each uniform block binding used by the shader:
        - the shader stage the uniform block appears in (SG_SHADERSTAGE_*)
        - the size in bytes of the uniform block
        - backend-specific bindslots:
            - HLSL: the constant buffer register `register(b0..7)`
            - MSL: the buffer attribute `[[buffer(0..7)]]`
            - WGSL: the binding in `@group(0) @binding(0..15)`
        - GLSL only: a description of the uniform block interior
            - the memory layout standard (SG_UNIFORMLAYOUT_*)
            - for each member in the uniform block:
                - the member type (SG_UNIFORM_*)
                - if the member is an array, the array count
                - the member name

    - reflection information for each texture-, storage-buffer- and
      storage-image-binding used by the shader, each with an associated
      view type:
        - texture bindings => texture views
        - storage-buffer bindings => storage-buffer views
        - storage-image bindings => storage-image views

    - texture bindings must provide the following information:
        - the shader stage the texture binding appears in (SG_SHADERSTAGE_*)
        - the image type (SG_IMAGETYPE_*)
        - the image-sample type (SG_IMAGESAMPLETYPE_*)
        - whether the texture is multisampled
        - backend specific bindslots:
            - HLSL: the texture register `register(t0..31)`
            - MSL: the texture attribute `[[texture(0..31)]]`
            - WGSL: the binding in `@group(1) @binding(0..127)`

    - storage-buffer bindings must provide the following information:
        - the shader stage the storage buffer appears in (SG_SHADERSTAGE_*)
        - whether the storage buffer is readonly
        - backend specific bindslots:
            - HLSL:
                - for readonly storage buffer bindings: `register(t0..31)`
                - for read/write storage buffer bindings: `register(u0..31)`
            - MSL: the buffer attribute `[[buffer(8..23)]]`
            - WGSL: the binding in `@group(1) @binding(0..127)`
            - GL: the binding in `layout(binding=0..sg_limits.max_storage_buffer_bindings_per_stage)`

    - storage-image bindings must provide the following information:
        - the shader stage (*must* be SG_SHADERSTAGE_COMPUTE)
        - whether the storage image is writeonly or readwrite (for readonly
          access use a regular texture binding instead)
        - the image type expected by the shader (SG_IMAGETYPE_*)
        - the access pixel format expected by the shader (SG_PIXELFORMAT_*),
          note that only a subset of pixel formats is allowed for storage image
          bindings
        - backend specific bindslots:
            - HLSL: the UAV register `register(u0..31)`
            - MSL: the texture attribute `[[texture(0..31)]]`
            - WGSL: the binding in `@group(1) @binding(0..127)`
            - GLSL: the binding in `layout(binding=0..sg_limits.max_storage_buffer_bindings_per_stage, [access_format])`

    - reflection information for each sampler used by the shader:
        - the shader stage the sampler appears in (SG_SHADERSTAGE_*)
        - the sampler type (SG_SAMPLERTYPE_*)
        - backend specific bindslots:
            - HLSL: the sampler register `register(s0..11)`
            - MSL: the sampler attribute `[[sampler(0..11)]]`
            - WGSL: the binding in `@group(0) @binding(0..127)`

    - reflection information for each texture-sampler pair used by
      the shader:
        - the shader stage (SG_SHADERSTAGE_*)
        - the texture's array index in the sg_shader_desc.views[] array
        - the sampler's array index in the sg_shader_desc.samplers[] array
        - GLSL only: the name of the combined image-sampler object

    The number and order of items in the sg_shader_desc.attrs[]
    array corresponds to the items in sg_pipeline_desc.layout.attrs:

        - sg_shader_desc.attrs[N] => sg_pipeline_desc.layout.attrs[N]

    NOTE that vertex attribute indices currently cannot have gaps.

    The item index in the sg_shader_desc.uniform_blocks[] array corresponds
    to the ub_slot arg in sg_apply_uniforms():

        - sg_shader_desc.uniform_blocks[N] => sg_apply_uniforms(N, ...)

    The items in the sg_shader_desc.views[] array directly map to
    the views in the sg_bindings.views[] array!

    For all GL backends, shader source-code must be provided. For D3D11 and Metal,
    either shader source-code or byte-code can be provided.

    NOTE that the uniform-block, view and sampler arrays may have gaps. This
    allows to use the same sg_bindings struct for different but related
    shader variations.

    For D3D11, if source code is provided, the d3dcompiler_47.dll will be loaded
    on demand. If this fails, shader creation will fail. When compiling HLSL
    source code, you can provide an optional target string via
    sg_shader_function.d3d11_target, the default target is "vs_4_0" for the
    vertex shader stage and "ps_4_0" for the pixel shader stage.
    You may optionally provide the file path to enable the default #include handler
    behavior when compiling source code.
*/
typedef enum sg_shader_stage {
    SG_SHADERSTAGE_NONE,
    SG_SHADERSTAGE_VERTEX,
    SG_SHADERSTAGE_FRAGMENT,
    SG_SHADERSTAGE_COMPUTE,
    _SG_SHADERSTAGE_FORCE_U32 = 0x7FFFFFFF,
} sg_shader_stage;

typedef struct sg_shader_function {
    const char* source;
    sg_range bytecode;
    const char* entry;
    const char* d3d11_target;   // default: "vs_4_0" or "ps_4_0"
    const char* d3d11_filepath;
} sg_shader_function;

typedef enum sg_shader_attr_base_type {
    SG_SHADERATTRBASETYPE_UNDEFINED,
    SG_SHADERATTRBASETYPE_FLOAT,
    SG_SHADERATTRBASETYPE_SINT,
    SG_SHADERATTRBASETYPE_UINT,
    _SG_SHADERATTRBASETYPE_FORCE_U32 = 0x7FFFFFFF,
} sg_shader_attr_base_type;

typedef struct sg_shader_vertex_attr {
    sg_shader_attr_base_type base_type; // default: UNDEFINED (disables validation)
    const char* glsl_name;              // [optional] GLSL attribute name
    const char* hlsl_sem_name;          // HLSL semantic name
    uint8_t hlsl_sem_index;             // HLSL semantic index
} sg_shader_vertex_attr;

typedef struct sg_glsl_shader_uniform {
    sg_uniform_type type;
    uint16_t array_count;               // 0 or 1 for scalars, >1 for arrays
    const char* glsl_name;              // glsl name binding is required on GL 4.1 and WebGL2
} sg_glsl_shader_uniform;

typedef struct sg_shader_uniform_block {
    sg_shader_stage stage;
    uint32_t size;
    uint8_t hlsl_register_b_n;          // HLSL register(bn)
    uint8_t msl_buffer_n;               // MSL [[buffer(n)]]
    uint8_t wgsl_group0_binding_n;      // WGSL @group(0) @binding(n)
    uint8_t spirv_set0_binding_n;       // Vulkan GLSL layout(set=0, binding=n)
    sg_uniform_layout layout;
    sg_glsl_shader_uniform glsl_uniforms[SG_MAX_UNIFORMBLOCK_MEMBERS];
} sg_shader_uniform_block;

typedef struct sg_shader_texture_view {
    sg_shader_stage stage;
    sg_image_type image_type;
    sg_image_sample_type sample_type;
    bool multisampled;
    uint8_t hlsl_register_t_n;          // HLSL register(tn) bind slot
    uint8_t msl_texture_n;              // MSL [[texture(n)]] bind slot
    uint8_t wgsl_group1_binding_n;      // WGSL @group(1) @binding(n) bind slot
    uint8_t spirv_set1_binding_n;       // Vulkan GLSL layout(set=1, binding=n)
} sg_shader_texture_view;

typedef struct sg_shader_storage_buffer_view {
    sg_shader_stage stage;
    bool readonly;
    uint8_t hlsl_register_t_n;          // HLSL register(tn) bind slot (for readonly access)
    uint8_t hlsl_register_u_n;          // HLSL register(un) bind slot (for read/write access)
    uint8_t msl_buffer_n;               // MSL [[buffer(n)]] bind slot
    uint8_t wgsl_group1_binding_n;      // WGSL @group(1) @binding(n) bind slot
    uint8_t spirv_set1_binding_n;       // Vulkan GLSL layout(set=1, binding=n)
    uint8_t glsl_binding_n;             // GLSL layout(binding=n)
} sg_shader_storage_buffer_view;

typedef struct sg_shader_storage_image_view {
    sg_shader_stage stage;
    sg_image_type image_type;
    sg_pixel_format access_format;      // shader-access pixel format
    bool writeonly;                     // false means read/write access
    uint8_t hlsl_register_u_n;          // HLSL register(un) bind slot
    uint8_t msl_texture_n;              // MSL [[texture(n)]] bind slot
    uint8_t wgsl_group1_binding_n;      // WGSL @group(1) @binding(n) bind slot
    uint8_t spirv_set1_binding_n;       // Vulkan GLSL layout(set=1, binding=n)
    uint8_t glsl_binding_n;             // GLSL layout(binding=n)
} sg_shader_storage_image_view;

typedef struct sg_shader_view {
    sg_shader_texture_view texture;
    sg_shader_storage_buffer_view storage_buffer;
    sg_shader_storage_image_view storage_image;
} sg_shader_view;

typedef struct sg_shader_sampler {
    sg_shader_stage stage;
    sg_sampler_type sampler_type;
    uint8_t hlsl_register_s_n;          // HLSL register(sn) bind slot
    uint8_t msl_sampler_n;              // MSL [[sampler(n)]] bind slot
    uint8_t wgsl_group1_binding_n;      // WGSL @group(1) @binding(n) bind slot
    uint8_t spirv_set1_binding_n;       // Vulkan GLSL layout(set=1, binding=n)
} sg_shader_sampler;

typedef struct sg_shader_texture_sampler_pair {
    sg_shader_stage stage;
    uint8_t view_slot;      // index into views[], the view at this slot must be a texture view
    uint8_t sampler_slot;   // index into samplers[]
    const char* glsl_name;  // glsl name binding required because of GL 4.1 and WebGL2
} sg_shader_texture_sampler_pair;

typedef struct sg_mtl_shader_threads_per_threadgroup {
    int x, y, z;
} sg_mtl_shader_threads_per_threadgroup;

typedef struct sg_shader_desc {
    uint32_t _start_canary;
    sg_shader_function vertex_func;
    sg_shader_function fragment_func;
    sg_shader_function compute_func;
    sg_shader_vertex_attr attrs[SG_MAX_VERTEX_ATTRIBUTES];
    sg_shader_uniform_block uniform_blocks[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
    sg_shader_view views[SG_MAX_VIEW_BINDSLOTS];
    sg_shader_sampler samplers[SG_MAX_SAMPLER_BINDSLOTS];
    sg_shader_texture_sampler_pair texture_sampler_pairs[SG_MAX_TEXTURE_SAMPLER_PAIRS];
    sg_mtl_shader_threads_per_threadgroup mtl_threads_per_threadgroup;
    const char* label;
    uint32_t _end_canary;
} sg_shader_desc;
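/*
    A minimal hand-written sg_shader_desc sketch for the GL backend
    (vs_src/fs_src are assumed GLSL source strings, vs_params_t an assumed
    CPU-side uniform block struct):

        sg_shader shd = sg_make_shader(&(sg_shader_desc){
            .vertex_func.source = vs_src,
            .fragment_func.source = fs_src,
            .attrs[0] = { .glsl_name = "position", .base_type = SG_SHADERATTRBASETYPE_FLOAT },
            .uniform_blocks[0] = {
                .stage = SG_SHADERSTAGE_VERTEX,
                .size = sizeof(vs_params_t),
                .glsl_uniforms[0] = { .type = SG_UNIFORMTYPE_MAT4, .glsl_name = "mvp" },
            },
        });
*/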
/*
    sg_pipeline_desc

    The sg_pipeline_desc struct defines all creation parameters for an
    sg_pipeline object, used as argument to the sg_make_pipeline() function.

    Pipeline objects come in two flavours:

    - render pipelines for use in render passes
    - compute pipelines for use in compute passes

    A compute pipeline only requires a compute shader object but no
    'render state', while a render pipeline requires a vertex/fragment shader
    object and additional render state declarations:

    - the vertex layout for all input vertex buffers
    - a shader object
    - the 3D primitive type (points, lines, triangles, ...)
    - the index type (none, 16- or 32-bit)
    - all the fixed-function-pipeline state (depth-, stencil-, blend-state, etc...)

    If the vertex data has no gaps between vertex components, you can omit
    the .layout.buffers[].stride and layout.attrs[].offset items (leave them
    default-initialized to 0), sokol-gfx will then compute the offsets and
    strides from the vertex component formats (.layout.attrs[].format).
    Please note that ALL vertex attribute offsets must be 0 in order for the
    automatic offset computation to kick in.

    Note that if you use vertex-pulling from storage buffers instead of
    fixed-function vertex input you can simply omit the entire nested .layout
    struct.

    The default configuration is as follows:

    .compute:               false (must be set to true for a compute pipeline)
    .shader:                0 (must be initialized with a valid sg_shader id!)
    .layout:
        .buffers[]:         vertex buffer layouts
            .stride:        0 (if no stride is given it will be computed)
            .step_func      SG_VERTEXSTEP_PER_VERTEX
            .step_rate      1
        .attrs[]:           vertex attribute declarations
            .buffer_index   0 (the vertex buffer bind slot)
            .offset         0 (offsets can be omitted if the vertex layout has no gaps)
            .format         SG_VERTEXFORMAT_INVALID (must be initialized!)
    .depth:
        .pixel_format:      sg_desc.environment.defaults.depth_format
        .compare:           SG_COMPAREFUNC_ALWAYS
        .write_enabled:     false
        .bias:              0.0f
        .bias_slope_scale:  0.0f
        .bias_clamp:        0.0f
    .stencil:
        .enabled:           false
        .front/back:
            .compare:       SG_COMPAREFUNC_ALWAYS
            .fail_op:       SG_STENCILOP_KEEP
            .depth_fail_op: SG_STENCILOP_KEEP
            .pass_op:       SG_STENCILOP_KEEP
        .read_mask:         0
        .write_mask:        0
        .ref:               0
    .color_count            1
    .colors[0..color_count]
        .pixel_format       sg_desc.environment.defaults.color_format
        .write_mask:        SG_COLORMASK_RGBA
        .blend:
            .enabled:           false
            .src_factor_rgb:    SG_BLENDFACTOR_ONE
            .dst_factor_rgb:    SG_BLENDFACTOR_ZERO
            .op_rgb:            SG_BLENDOP_ADD
            .src_factor_alpha:  SG_BLENDFACTOR_ONE
            .dst_factor_alpha:  SG_BLENDFACTOR_ZERO
            .op_alpha:          SG_BLENDOP_ADD
    .primitive_type:            SG_PRIMITIVETYPE_TRIANGLES
    .index_type:                SG_INDEXTYPE_NONE
    .cull_mode:                 SG_CULLMODE_NONE
    .face_winding:              SG_FACEWINDING_CW
    .sample_count:              sg_desc.environment.defaults.sample_count
    .blend_color:               (sg_color) { 0.0f, 0.0f, 0.0f, 0.0f }
    .alpha_to_coverage_enabled: false
    .label                      0 (optional string label for trace hooks)
*/
typedef struct sg_vertex_buffer_layout_state {
    int stride;
    sg_vertex_step step_func;
    int step_rate;
} sg_vertex_buffer_layout_state;

typedef struct sg_vertex_attr_state {
    int buffer_index;
    int offset;
    sg_vertex_format format;
} sg_vertex_attr_state;

typedef struct sg_vertex_layout_state {
    sg_vertex_buffer_layout_state buffers[SG_MAX_VERTEXBUFFER_BINDSLOTS];
    sg_vertex_attr_state attrs[SG_MAX_VERTEX_ATTRIBUTES];
} sg_vertex_layout_state;

typedef struct sg_stencil_face_state {
    sg_compare_func compare;
    sg_stencil_op fail_op;
    sg_stencil_op depth_fail_op;
    sg_stencil_op pass_op;
} sg_stencil_face_state;

typedef struct sg_stencil_state {
    bool enabled;
    sg_stencil_face_state front;
    sg_stencil_face_state back;
    uint8_t read_mask;
    uint8_t write_mask;
    uint8_t ref;
} sg_stencil_state;

typedef struct sg_depth_state {
    sg_pixel_format pixel_format;
    sg_compare_func compare;
    bool write_enabled;
    float bias;
    float bias_slope_scale;
    float bias_clamp;
} sg_depth_state;

typedef struct sg_blend_state {
    bool enabled;
    sg_blend_factor src_factor_rgb;
    sg_blend_factor dst_factor_rgb;
    sg_blend_op op_rgb;
    sg_blend_factor src_factor_alpha;
    sg_blend_factor dst_factor_alpha;
    sg_blend_op op_alpha;
} sg_blend_state;

typedef struct sg_color_target_state {
    sg_pixel_format pixel_format;
    sg_color_mask write_mask;
    sg_blend_state blend;
} sg_color_target_state;

typedef struct sg_pipeline_desc {
    uint32_t _start_canary;
    bool compute;
    sg_shader shader;
    sg_vertex_layout_state layout;
    sg_depth_state depth;
    sg_stencil_state stencil;
    int color_count;
    sg_color_target_state colors[SG_MAX_COLOR_ATTACHMENTS];
    sg_primitive_type primitive_type;
    sg_index_type index_type;
    sg_cull_mode cull_mode;
    sg_face_winding face_winding;
    int sample_count;
    sg_color blend_color;
    bool alpha_to_coverage_enabled;
    const char* label;
    uint32_t _end_canary;
} sg_pipeline_desc;
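/*
    For example, a sketch creating a simple alpha-blended render pipeline
    ('shd' is an assumed valid shader object):

        sg_pipeline pip = sg_make_pipeline(&(sg_pipeline_desc){
            .shader = shd,
            .layout.attrs = {
                [0].format = SG_VERTEXFORMAT_FLOAT3,    // position
                [1].format = SG_VERTEXFORMAT_UBYTE4N,   // color
            },
            .colors[0].blend = {
                .enabled = true,
                .src_factor_rgb = SG_BLENDFACTOR_SRC_ALPHA,
                .dst_factor_rgb = SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA,
            },
            .label = "alpha-blended-pipeline",
        });
*/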
/*
    sg_view_desc

    Creation params for sg_view objects, passed into sg_make_view() calls.

    View objects are passed into sg_apply_bindings() (for texture-, storage-buffer-
    and storage-image views), and sg_begin_pass() (for color-, resolve-
    and depth-stencil-attachment views).

    The view type is determined by initializing one of the sub-structs of
    sg_view_desc:

    .texture                    a texture-view object will be created
        .image                  the sg_image parent resource
        .mip_levels             optional mip-level range, keep zero-initialized for the
                                entire mipmap chain
            .base               the first mip level
            .count              number of mip levels, keeping this zero-initialized means
                                'all remaining mip levels'
        .slices                 optional slice range, keep zero-initialized to include
                                all slices
            .base               the first slice
            .count              number of slices, keeping this zero-initialized means
                                'all remaining slices'
    .storage_buffer             a storage-buffer-view object will be created
        .buffer                 the sg_buffer parent resource, must have been created
                                with `sg_buffer_desc.usage.storage_buffer = true`
        .offset                 optional 256-byte aligned byte-offset into the buffer
    .storage_image              a storage-image-view object will be created
        .image                  the sg_image parent resource, must have been created
                                with `sg_image_desc.usage.storage_image = true`
        .mip_level              selects the mip-level for the compute shader to write
        .slice                  selects the slice for the compute shader to write
    .color_attachment           a color-attachment-view object will be created
        .image                  the sg_image parent resource, must have been created
                                with `sg_image_desc.usage.color_attachment = true`
        .mip_level              selects the mip-level to render into
        .slice                  selects the slice to render into
    .resolve_attachment         a resolve-attachment-view object will be created
        .image                  the sg_image parent resource, must have been created
                                with `sg_image_desc.usage.resolve_attachment = true`
        .mip_level              selects the mip-level to msaa-resolve into
        .slice                  selects the slice to msaa-resolve into
    .depth_stencil_attachment   a depth-stencil-attachment-view object will be created
        .image                  the sg_image parent resource, must have been created
                                with `sg_image_desc.usage.depth_stencil_attachment = true`
        .mip_level              selects the mip-level to render into
        .slice                  selects the slice to render into

    See the usage sketch after the struct declarations below.
*/
typedef struct sg_buffer_view_desc {
    sg_buffer buffer;
    int offset;
} sg_buffer_view_desc;

typedef struct sg_image_view_desc {
    sg_image image;
    int mip_level;
    int slice;  // cube texture: face; array texture: layer; 3D texture: depth-slice
} sg_image_view_desc;

typedef struct sg_texture_view_range {
    int base;
    int count;
} sg_texture_view_range;

typedef struct sg_texture_view_desc {
    sg_image image;
    sg_texture_view_range mip_levels;
    sg_texture_view_range slices;   // cube texture: face; array texture: layer; 3D texture: depth-slice
} sg_texture_view_desc;

typedef struct sg_view_desc {
    uint32_t _start_canary;
    sg_texture_view_desc texture;
    sg_buffer_view_desc storage_buffer;
    sg_image_view_desc storage_image;
    sg_image_view_desc color_attachment;
    sg_image_view_desc resolve_attachment;
    sg_image_view_desc depth_stencil_attachment;
    const char* label;
    uint32_t _end_canary;
} sg_view_desc;
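/*
    Usage sketch (not part of the original documentation): creating a
    texture view over an image's entire mipmap chain, and a storage-buffer
    view with a 256-byte-aligned offset. 'img' and 'sbuf' are assumed to be
    valid resources created with the matching usage flags:

        sg_view tex_view = sg_make_view(&(sg_view_desc){
            .texture.image = img,   // zero-initialized ranges == all mips/slices
        });
        sg_view sbuf_view = sg_make_view(&(sg_view_desc){
            .storage_buffer = { .buffer = sbuf, .offset = 256 },
        });
*/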
/*
    sg_trace_hooks

    Installable callback functions to keep track of the sokol-gfx calls.
    This is useful for debugging, or for keeping track of resource creation
    and destruction.

    Trace hooks are installed with sg_install_trace_hooks(), which returns
    another sg_trace_hooks struct containing the previous set of trace hook
    function pointers. These should be invoked by the new trace hooks to
    form a proper call chain (see the sketch after the struct declaration below).
*/
typedef struct sg_trace_hooks {
    void* user_data;
    void (*reset_state_cache)(void* user_data);
    void (*make_buffer)(const sg_buffer_desc* desc, sg_buffer result, void* user_data);
    void (*make_image)(const sg_image_desc* desc, sg_image result, void* user_data);
    void (*make_sampler)(const sg_sampler_desc* desc, sg_sampler result, void* user_data);
    void (*make_shader)(const sg_shader_desc* desc, sg_shader result, void* user_data);
    void (*make_pipeline)(const sg_pipeline_desc* desc, sg_pipeline result, void* user_data);
    void (*make_view)(const sg_view_desc* desc, sg_view result, void* user_data);
    void (*destroy_buffer)(sg_buffer buf, void* user_data);
    void (*destroy_image)(sg_image img, void* user_data);
    void (*destroy_sampler)(sg_sampler smp, void* user_data);
    void (*destroy_shader)(sg_shader shd, void* user_data);
    void (*destroy_pipeline)(sg_pipeline pip, void* user_data);
    void (*destroy_view)(sg_view view, void* user_data);
    void (*update_buffer)(sg_buffer buf, const sg_range* data, void* user_data);
    void (*update_image)(sg_image img, const sg_image_data* data, void* user_data);
    void (*append_buffer)(sg_buffer buf, const sg_range* data, int result, void* user_data);
    void (*begin_pass)(const sg_pass* pass, void* user_data);
    void (*apply_viewport)(int x, int y, int width, int height, bool origin_top_left, void* user_data);
    void (*apply_scissor_rect)(int x, int y, int width, int height, bool origin_top_left, void* user_data);
    void (*apply_pipeline)(sg_pipeline pip, void* user_data);
    void (*apply_bindings)(const sg_bindings* bindings, void* user_data);
    void (*apply_uniforms)(int ub_index, const sg_range* data, void* user_data);
    void (*draw)(int base_element, int num_elements, int num_instances, void* user_data);
    void (*draw_ex)(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance, void* user_data);
    void (*dispatch)(int num_groups_x, int num_groups_y, int num_groups_z, void* user_data);
    void (*end_pass)(void* user_data);
    void (*commit)(void* user_data);
    void (*alloc_buffer)(sg_buffer result, void* user_data);
    void (*alloc_image)(sg_image result, void* user_data);
    void (*alloc_sampler)(sg_sampler result, void* user_data);
    void (*alloc_shader)(sg_shader result, void* user_data);
    void (*alloc_pipeline)(sg_pipeline result, void* user_data);
    void (*alloc_view)(sg_view result, void* user_data);
    void (*dealloc_buffer)(sg_buffer buf_id, void* user_data);
    void (*dealloc_image)(sg_image img_id, void* user_data);
    void (*dealloc_sampler)(sg_sampler smp_id, void* user_data);
    void (*dealloc_shader)(sg_shader shd_id, void* user_data);
    void (*dealloc_pipeline)(sg_pipeline pip_id, void* user_data);
    void (*dealloc_view)(sg_view view_id, void* user_data);
    void (*init_buffer)(sg_buffer buf_id, const sg_buffer_desc* desc, void* user_data);
    void (*init_image)(sg_image img_id, const sg_image_desc* desc, void* user_data);
    void (*init_sampler)(sg_sampler smp_id, const sg_sampler_desc* desc, void* user_data);
    void (*init_shader)(sg_shader shd_id, const sg_shader_desc* desc, void* user_data);
    void (*init_pipeline)(sg_pipeline pip_id, const sg_pipeline_desc* desc, void* user_data);
    void (*init_view)(sg_view view_id, const sg_view_desc* desc, void* user_data);
    void (*uninit_buffer)(sg_buffer buf_id, void* user_data);
    void (*uninit_image)(sg_image img_id, void* user_data);
    void (*uninit_sampler)(sg_sampler smp_id, void* user_data);
    void (*uninit_shader)(sg_shader shd_id, void* user_data);
    void (*uninit_pipeline)(sg_pipeline pip_id, void* user_data);
    void (*uninit_view)(sg_view view_id, void* user_data);
    void (*fail_buffer)(sg_buffer buf_id, void* user_data);
    void (*fail_image)(sg_image img_id, void* user_data);
    void (*fail_sampler)(sg_sampler smp_id, void* user_data);
    void (*fail_shader)(sg_shader shd_id, void* user_data);
    void (*fail_pipeline)(sg_pipeline pip_id, void* user_data);
    void (*fail_view)(sg_view view_id, void* user_data);
    void (*push_debug_group)(const char* name, void* user_data);
    void (*pop_debug_group)(void* user_data);
} sg_trace_hooks;
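/*
    Usage sketch (not part of the original documentation): installing a
    trace hook which counts sg_draw() calls and forwards to the previously
    installed hook so the call chain stays intact. The names 'old_hooks',
    'num_draws' and 'my_draw' are invented for this example, and
    SOKOL_TRACE_HOOKS must be defined for the hooks to be called:

        static sg_trace_hooks old_hooks;
        static uint32_t num_draws;

        static void my_draw(int base_element, int num_elements, int num_instances, void* user_data) {
            (void)user_data;
            num_draws++;
            // forward to the previous hook to preserve the call chain
            if (old_hooks.draw) {
                old_hooks.draw(base_element, num_elements, num_instances, old_hooks.user_data);
            }
        }

        // ...during setup:
        old_hooks = sg_install_trace_hooks(&(sg_trace_hooks){ .draw = my_draw });
*/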
/*
    sg_buffer_info
    sg_image_info
    sg_sampler_info
    sg_shader_info
    sg_pipeline_info
    sg_view_info

    These structs contain various internal resource attributes which
    might be useful for debug-inspection. Please don't rely on the
    actual content of those structs too much, as they are quite closely
    tied to sokol_gfx.h internals and may change more frequently than
    the other public API elements.

    The *_info structs are used as the return values of the following functions:

    sg_query_buffer_info()
    sg_query_image_info()
    sg_query_sampler_info()
    sg_query_shader_info()
    sg_query_pipeline_info()
    sg_query_view_info()
*/
typedef struct sg_slot_info {
    sg_resource_state state;    // the current state of this resource slot
    uint32_t res_id;            // type-neutral resource id (e.g. sg_buffer.id)
    uint32_t uninit_count;
} sg_slot_info;
typedef struct sg_buffer_info {
    sg_slot_info slot;              // resource pool slot info
    uint32_t update_frame_index;    // frame index of last sg_update_buffer()
    uint32_t append_frame_index;    // frame index of last sg_append_buffer()
    int append_pos;                 // current position in buffer for sg_append_buffer()
    bool append_overflow;           // is buffer in overflow state (due to sg_append_buffer)
    int num_slots;                  // number of renaming-slots for dynamically updated buffers
    int active_slot;                // currently active write-slot for dynamically updated buffers
} sg_buffer_info;

typedef struct sg_image_info {
    sg_slot_info slot;              // resource pool slot info
    uint32_t upd_frame_index;       // frame index of last sg_update_image()
    int num_slots;                  // number of renaming-slots for dynamically updated images
    int active_slot;                // currently active write-slot for dynamically updated images
} sg_image_info;

typedef struct sg_sampler_info {
    sg_slot_info slot;              // resource pool slot info
} sg_sampler_info;

typedef struct sg_shader_info {
    sg_slot_info slot;              // resource pool slot info
} sg_shader_info;

typedef struct sg_pipeline_info {
    sg_slot_info slot;              // resource pool slot info
} sg_pipeline_info;

typedef struct sg_view_info {
    sg_slot_info slot;              // resource pool slot info
} sg_view_info;
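/*
    Usage sketch (not part of the original documentation): checking whether
    a dynamically updated buffer 'buf' has been put into overflow state by
    appending too much data in the current frame:

        sg_buffer_info info = sg_query_buffer_info(buf);
        if (info.append_overflow) {
            // too much data was appended this frame, consider
            // creating the buffer with a larger size
        }
*/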
/*
    sg_stats

    Allows tracking of generic and backend-specific rendering stats,
    obtained via sg_query_stats().
*/
typedef struct sg_frame_stats_gl {
    uint32_t num_bind_buffer;
    uint32_t num_active_texture;
    uint32_t num_bind_texture;
    uint32_t num_bind_sampler;
    uint32_t num_bind_image_texture;
    uint32_t num_use_program;
    uint32_t num_render_state;
    uint32_t num_vertex_attrib_pointer;
    uint32_t num_vertex_attrib_divisor;
    uint32_t num_enable_vertex_attrib_array;
    uint32_t num_disable_vertex_attrib_array;
    uint32_t num_uniform;
    uint32_t num_memory_barriers;
} sg_frame_stats_gl;

typedef struct sg_frame_stats_d3d11_pass {
    uint32_t num_om_set_render_targets;
    uint32_t num_clear_render_target_view;
    uint32_t num_clear_depth_stencil_view;
    uint32_t num_resolve_subresource;
} sg_frame_stats_d3d11_pass;

typedef struct sg_frame_stats_d3d11_pipeline {
    uint32_t num_rs_set_state;
    uint32_t num_om_set_depth_stencil_state;
    uint32_t num_om_set_blend_state;
    uint32_t num_ia_set_primitive_topology;
    uint32_t num_ia_set_input_layout;
    uint32_t num_vs_set_shader;
    uint32_t num_vs_set_constant_buffers;
    uint32_t num_ps_set_shader;
    uint32_t num_ps_set_constant_buffers;
    uint32_t num_cs_set_shader;
    uint32_t num_cs_set_constant_buffers;
} sg_frame_stats_d3d11_pipeline;

typedef struct sg_frame_stats_d3d11_bindings {
    uint32_t num_ia_set_vertex_buffers;
    uint32_t num_ia_set_index_buffer;
    uint32_t num_vs_set_shader_resources;
    uint32_t num_vs_set_samplers;
    uint32_t num_ps_set_shader_resources;
    uint32_t num_ps_set_samplers;
    uint32_t num_cs_set_shader_resources;
    uint32_t num_cs_set_samplers;
    uint32_t num_cs_set_unordered_access_views;
} sg_frame_stats_d3d11_bindings;

typedef struct sg_frame_stats_d3d11_uniforms {
    uint32_t num_update_subresource;
} sg_frame_stats_d3d11_uniforms;

typedef struct sg_frame_stats_d3d11_draw {
    uint32_t num_draw_indexed_instanced;
    uint32_t num_draw_indexed;
    uint32_t num_draw_instanced;
    uint32_t num_draw;
} sg_frame_stats_d3d11_draw;

typedef struct sg_frame_stats_d3d11 {
    sg_frame_stats_d3d11_pass pass;
    sg_frame_stats_d3d11_pipeline pipeline;
    sg_frame_stats_d3d11_bindings bindings;
    sg_frame_stats_d3d11_uniforms uniforms;
    sg_frame_stats_d3d11_draw draw;
    uint32_t num_map;
    uint32_t num_unmap;
} sg_frame_stats_d3d11;

typedef struct sg_frame_stats_metal_idpool {
    uint32_t num_added;
    uint32_t num_released;
    uint32_t num_garbage_collected;
} sg_frame_stats_metal_idpool;

typedef struct sg_frame_stats_metal_pipeline {
    uint32_t num_set_blend_color;
    uint32_t num_set_cull_mode;
    uint32_t num_set_front_facing_winding;
    uint32_t num_set_stencil_reference_value;
    uint32_t num_set_depth_bias;
    uint32_t num_set_render_pipeline_state;
    uint32_t num_set_depth_stencil_state;
} sg_frame_stats_metal_pipeline;

typedef struct sg_frame_stats_metal_bindings {
    uint32_t num_set_vertex_buffer;
    uint32_t num_set_vertex_buffer_offset;
    uint32_t num_skip_redundant_vertex_buffer;
    uint32_t num_set_vertex_texture;
    uint32_t num_skip_redundant_vertex_texture;
    uint32_t num_set_vertex_sampler_state;
    uint32_t num_skip_redundant_vertex_sampler_state;
    uint32_t num_set_fragment_buffer;
    uint32_t num_set_fragment_buffer_offset;
    uint32_t num_skip_redundant_fragment_buffer;
    uint32_t num_set_fragment_texture;
    uint32_t num_skip_redundant_fragment_texture;
    uint32_t num_set_fragment_sampler_state;
    uint32_t num_skip_redundant_fragment_sampler_state;
    uint32_t num_set_compute_buffer;
    uint32_t num_set_compute_buffer_offset;
    uint32_t num_skip_redundant_compute_buffer;
    uint32_t num_set_compute_texture;
    uint32_t num_skip_redundant_compute_texture;
    uint32_t num_set_compute_sampler_state;
    uint32_t num_skip_redundant_compute_sampler_state;
} sg_frame_stats_metal_bindings;

typedef struct sg_frame_stats_metal_uniforms {
    uint32_t num_set_vertex_buffer_offset;
    uint32_t num_set_fragment_buffer_offset;
    uint32_t num_set_compute_buffer_offset;
} sg_frame_stats_metal_uniforms;

typedef struct sg_frame_stats_metal {
    sg_frame_stats_metal_idpool idpool;
    sg_frame_stats_metal_pipeline pipeline;
    sg_frame_stats_metal_bindings bindings;
    sg_frame_stats_metal_uniforms uniforms;
} sg_frame_stats_metal;

typedef struct sg_frame_stats_wgpu_uniforms {
    uint32_t num_set_bindgroup;
    uint32_t size_write_buffer;
} sg_frame_stats_wgpu_uniforms;

typedef struct sg_frame_stats_wgpu_bindings {
    uint32_t num_set_vertex_buffer;
    uint32_t num_skip_redundant_vertex_buffer;
    uint32_t num_set_index_buffer;
    uint32_t num_skip_redundant_index_buffer;
    uint32_t num_create_bindgroup;
    uint32_t num_discard_bindgroup;
    uint32_t num_set_bindgroup;
    uint32_t num_skip_redundant_bindgroup;
    uint32_t num_bindgroup_cache_hits;
    uint32_t num_bindgroup_cache_misses;
    uint32_t num_bindgroup_cache_collisions;
    uint32_t num_bindgroup_cache_invalidates;
    uint32_t num_bindgroup_cache_hash_vs_key_mismatch;
} sg_frame_stats_wgpu_bindings;

typedef struct sg_frame_stats_wgpu {
    sg_frame_stats_wgpu_uniforms uniforms;
    sg_frame_stats_wgpu_bindings bindings;
} sg_frame_stats_wgpu;

typedef struct sg_frame_stats_vk {
    uint32_t num_cmd_pipeline_barrier;
    uint32_t num_allocate_memory;
    uint32_t num_free_memory;
    uint32_t size_allocate_memory;
    uint32_t num_delete_queue_added;
    uint32_t num_delete_queue_collected;
    uint32_t num_cmd_copy_buffer;
    uint32_t num_cmd_copy_buffer_to_image;
    uint32_t num_cmd_set_descriptor_buffer_offsets;
    uint32_t size_descriptor_buffer_writes;
} sg_frame_stats_vk;
typedef struct sg_frame_resource_stats {
    uint32_t allocated;     // number of allocated objects in current frame
    uint32_t deallocated;   // number of deallocated objects in current frame
    uint32_t inited;        // number of initialized objects in current frame
    uint32_t uninited;      // number of deinitialized objects in current frame
} sg_frame_resource_stats;

typedef struct sg_total_resource_stats {
    uint32_t alive;         // number of live objects in pool
    uint32_t free;          // number of free objects in pool
    uint32_t allocated;     // total number of object allocations
    uint32_t deallocated;   // total number of object deallocations
    uint32_t inited;        // total number of object initializations
    uint32_t uninited;      // total number of object deinitializations
} sg_total_resource_stats;
typedef struct sg_total_stats {
    sg_total_resource_stats buffers;
    sg_total_resource_stats images;
    sg_total_resource_stats samplers;
    sg_total_resource_stats views;
    sg_total_resource_stats shaders;
    sg_total_resource_stats pipelines;
} sg_total_stats;

typedef struct sg_frame_stats {
    uint32_t frame_index;   // current frame counter, starts at 0
    uint32_t num_passes;
    uint32_t num_apply_viewport;
    uint32_t num_apply_scissor_rect;
    uint32_t num_apply_pipeline;
    uint32_t num_apply_bindings;
    uint32_t num_apply_uniforms;
    uint32_t num_draw;
    uint32_t num_draw_ex;
    uint32_t num_dispatch;
    uint32_t num_update_buffer;
    uint32_t num_append_buffer;
    uint32_t num_update_image;
    uint32_t size_apply_uniforms;
    uint32_t size_update_buffer;
    uint32_t size_append_buffer;
    uint32_t size_update_image;
    sg_frame_resource_stats buffers;
    sg_frame_resource_stats images;
    sg_frame_resource_stats samplers;
    sg_frame_resource_stats views;
    sg_frame_resource_stats shaders;
    sg_frame_resource_stats pipelines;
    sg_frame_stats_gl gl;
    sg_frame_stats_d3d11 d3d11;
    sg_frame_stats_metal metal;
    sg_frame_stats_wgpu wgpu;
    sg_frame_stats_vk vk;
} sg_frame_stats;

typedef struct sg_stats {
    sg_frame_stats prev_frame;
    sg_frame_stats cur_frame;
    sg_total_stats total;
} sg_stats;
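/*
    Usage sketch (not part of the original documentation, assumes <stdio.h>
    is included): printing a few stats of the last finished frame:

        sg_stats stats = sg_query_stats();
        printf("draws: %u, apply_bindings: %u\n",
            stats.prev_frame.num_draw,
            stats.prev_frame.num_apply_bindings);
*/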
/*
    sg_log_item

    An enum with a unique item for each log message, warning, error
    and validation layer message. Note that these messages are only
    visible when a logger function is installed in the sg_setup() call.
*/
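/*
    Usage sketch (not part of the original documentation): installing the
    standard logger from sokol_log.h so that the log items below actually
    become visible:

        #include "sokol_log.h"
        // ...
        sg_setup(&(sg_desc){
            .logger.func = slog_func,
        });
*/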
#define _SG_LOG_ITEMS \
    _SG_LOGITEM_XMACRO(OK, "Ok") \
    _SG_LOGITEM_XMACRO(MALLOC_FAILED, "memory allocation failed") \
    _SG_LOGITEM_XMACRO(GL_TEXTURE_FORMAT_NOT_SUPPORTED, "pixel format not supported for texture (gl)") \
    _SG_LOGITEM_XMACRO(GL_3D_TEXTURES_NOT_SUPPORTED, "3d textures not supported (gl)") \
    _SG_LOGITEM_XMACRO(GL_ARRAY_TEXTURES_NOT_SUPPORTED, "array textures not supported (gl)") \
    _SG_LOGITEM_XMACRO(GL_STORAGEBUFFER_GLSL_BINDING_OUT_OF_RANGE, "GLSL storage buffer bindslot is out of range (sg_limits.max_storage_buffer_bindings_per_stage) (gl)") \
    _SG_LOGITEM_XMACRO(GL_STORAGEIMAGE_GLSL_BINDING_OUT_OF_RANGE, "GLSL storage image bindslot is out of range (sg_limits.max_storage_image_bindings_per_stage) (gl)") \
    _SG_LOGITEM_XMACRO(GL_SHADER_COMPILATION_FAILED, "shader compilation failed (gl)") \
    _SG_LOGITEM_XMACRO(GL_SHADER_LINKING_FAILED, "shader linking failed (gl)") \
    _SG_LOGITEM_XMACRO(GL_VERTEX_ATTRIBUTE_NOT_FOUND_IN_SHADER, "vertex attribute not found in shader; NOTE: may be caused by GL driver's GLSL compiler removing unused globals") \
    _SG_LOGITEM_XMACRO(GL_UNIFORMBLOCK_NAME_NOT_FOUND_IN_SHADER, "uniform block name not found in shader; NOTE: may be caused by GL driver's GLSL compiler removing unused globals") \
    _SG_LOGITEM_XMACRO(GL_IMAGE_SAMPLER_NAME_NOT_FOUND_IN_SHADER, "image-sampler name not found in shader; NOTE: may be caused by GL driver's GLSL compiler removing unused globals") \
    _SG_LOGITEM_XMACRO(GL_FRAMEBUFFER_STATUS_UNDEFINED, "framebuffer completeness check failed with GL_FRAMEBUFFER_UNDEFINED (gl)") \
    _SG_LOGITEM_XMACRO(GL_FRAMEBUFFER_STATUS_INCOMPLETE_ATTACHMENT, "framebuffer completeness check failed with GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT (gl)") \
    _SG_LOGITEM_XMACRO(GL_FRAMEBUFFER_STATUS_INCOMPLETE_MISSING_ATTACHMENT, "framebuffer completeness check failed with GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT (gl)") \
    _SG_LOGITEM_XMACRO(GL_FRAMEBUFFER_STATUS_UNSUPPORTED, "framebuffer completeness check failed with GL_FRAMEBUFFER_UNSUPPORTED (gl)") \
    _SG_LOGITEM_XMACRO(GL_FRAMEBUFFER_STATUS_INCOMPLETE_MULTISAMPLE, "framebuffer completeness check failed with GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE (gl)") \
    _SG_LOGITEM_XMACRO(GL_FRAMEBUFFER_STATUS_UNKNOWN, "framebuffer completeness check failed (unknown reason) (gl)") \
    _SG_LOGITEM_XMACRO(D3D11_FEATURE_LEVEL_0_DETECTED, "D3D11 Feature Level 0 device detected; this restricts the number of UAV slots to 8! (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_BUFFER_FAILED, "CreateBuffer() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_BUFFER_SRV_FAILED, "CreateShaderResourceView() failed for storage buffer (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_BUFFER_UAV_FAILED, "CreateUnorderedAccessView() failed for storage buffer (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_DEPTH_TEXTURE_UNSUPPORTED_PIXEL_FORMAT, "pixel format not supported for depth-stencil texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_DEPTH_TEXTURE_FAILED, "CreateTexture2D() failed for depth-stencil texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_2D_TEXTURE_UNSUPPORTED_PIXEL_FORMAT, "pixel format not supported for 2d-, cube- or array-texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_2D_TEXTURE_FAILED, "CreateTexture2D() failed for 2d-, cube- or array-texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_2D_SRV_FAILED, "CreateShaderResourceView() failed for 2d-, cube- or array-texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_3D_TEXTURE_UNSUPPORTED_PIXEL_FORMAT, "pixel format not supported for 3D texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_3D_TEXTURE_FAILED, "CreateTexture3D() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_3D_SRV_FAILED, "CreateShaderResourceView() failed for 3d texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_MSAA_TEXTURE_FAILED, "CreateTexture2D() failed for MSAA render target texture (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_SAMPLER_STATE_FAILED, "CreateSamplerState() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_UNIFORMBLOCK_HLSL_REGISTER_B_OUT_OF_RANGE, "sg_shader_desc.uniform_blocks[].hlsl_register_b_n is out of range (must be 0..7)") \
    _SG_LOGITEM_XMACRO(D3D11_STORAGEBUFFER_HLSL_REGISTER_T_OUT_OF_RANGE, "sg_shader_desc.views[].storage_buffer.hlsl_register_t_n is out of range (must be 0..31)") \
    _SG_LOGITEM_XMACRO(D3D11_STORAGEBUFFER_HLSL_REGISTER_U_OUT_OF_RANGE, "sg_shader_desc.views[].storage_buffer.hlsl_register_u_n is out of range (must be 0..31)") \
    _SG_LOGITEM_XMACRO(D3D11_IMAGE_HLSL_REGISTER_T_OUT_OF_RANGE, "sg_shader_desc.views[].texture.hlsl_register_t_n is out of range (must be 0..31)") \
    _SG_LOGITEM_XMACRO(D3D11_STORAGEIMAGE_HLSL_REGISTER_U_OUT_OF_RANGE, "sg_shader_desc.views[].storage_image.hlsl_register_u_n is out of range (must be 0..31)") \
    _SG_LOGITEM_XMACRO(D3D11_SAMPLER_HLSL_REGISTER_S_OUT_OF_RANGE, "sampler 'hlsl_register_s_n' is out of range (must be 0..11)") \
    _SG_LOGITEM_XMACRO(D3D11_LOAD_D3DCOMPILER_47_DLL_FAILED, "loading d3dcompiler_47.dll failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_SHADER_COMPILATION_FAILED, "shader compilation failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_SHADER_COMPILATION_OUTPUT, "") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_CONSTANT_BUFFER_FAILED, "CreateBuffer() failed for uniform constant buffer (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_INPUT_LAYOUT_FAILED, "CreateInputLayout() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_RASTERIZER_STATE_FAILED, "CreateRasterizerState() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_DEPTH_STENCIL_STATE_FAILED, "CreateDepthStencilState() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_BLEND_STATE_FAILED, "CreateBlendState() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_RTV_FAILED, "CreateRenderTargetView() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_DSV_FAILED, "CreateDepthStencilView() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_CREATE_UAV_FAILED, "CreateUnorderedAccessView() failed (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_MAP_FOR_UPDATE_BUFFER_FAILED, "Map() failed when updating buffer (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_MAP_FOR_APPEND_BUFFER_FAILED, "Map() failed when appending to buffer (d3d11)") \
    _SG_LOGITEM_XMACRO(D3D11_MAP_FOR_UPDATE_IMAGE_FAILED, "Map() failed when updating image (d3d11)") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_BUFFER_FAILED, "failed to create buffer object (metal)") \
    _SG_LOGITEM_XMACRO(METAL_TEXTURE_FORMAT_NOT_SUPPORTED, "pixel format not supported for texture (metal)") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_TEXTURE_FAILED, "failed to create texture object (metal)") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_SAMPLER_FAILED, "failed to create sampler object (metal)") \
    _SG_LOGITEM_XMACRO(METAL_SHADER_COMPILATION_FAILED, "shader compilation failed (metal)") \
    _SG_LOGITEM_XMACRO(METAL_SHADER_CREATION_FAILED, "shader creation failed (metal)") \
    _SG_LOGITEM_XMACRO(METAL_SHADER_COMPILATION_OUTPUT, "") \
    _SG_LOGITEM_XMACRO(METAL_SHADER_ENTRY_NOT_FOUND, "shader entry function not found (metal)") \
    _SG_LOGITEM_XMACRO(METAL_UNIFORMBLOCK_MSL_BUFFER_SLOT_OUT_OF_RANGE, "uniform block 'msl_buffer_n' is out of range (must be 0..7)") \
    _SG_LOGITEM_XMACRO(METAL_STORAGEBUFFER_MSL_BUFFER_SLOT_OUT_OF_RANGE, "storage buffer 'msl_buffer_n' is out of range (must be 8..23)") \
    _SG_LOGITEM_XMACRO(METAL_STORAGEIMAGE_MSL_TEXTURE_SLOT_OUT_OF_RANGE, "storage image 'msl_texture_n' is out of range (must be 0..31)") \
    _SG_LOGITEM_XMACRO(METAL_IMAGE_MSL_TEXTURE_SLOT_OUT_OF_RANGE, "image 'msl_texture_n' is out of range (must be 0..31)") \
    _SG_LOGITEM_XMACRO(METAL_SAMPLER_MSL_SAMPLER_SLOT_OUT_OF_RANGE, "sampler 'msl_sampler_n' is out of range (must be 0..11)") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_CPS_FAILED, "failed to create compute pipeline state (metal)") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_CPS_OUTPUT, "") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_RPS_FAILED, "failed to create render pipeline state (metal)") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_RPS_OUTPUT, "") \
    _SG_LOGITEM_XMACRO(METAL_CREATE_DSS_FAILED, "failed to create depth stencil state (metal)") \
    _SG_LOGITEM_XMACRO(WGPU_BINDGROUPS_POOL_EXHAUSTED, "bindgroups pool exhausted (increase sg_desc.wgpu.bindgroups_cache_size) (wgpu)") \
    _SG_LOGITEM_XMACRO(WGPU_BINDGROUPSCACHE_SIZE_GREATER_ONE, "sg_desc.wgpu.bindgroups_cache_size must be > 1 (wgpu)") \
    _SG_LOGITEM_XMACRO(WGPU_BINDGROUPSCACHE_SIZE_POW2, "sg_desc.wgpu.bindgroups_cache_size must be a power of 2 (wgpu)") \
    _SG_LOGITEM_XMACRO(WGPU_CREATEBINDGROUP_FAILED, "wgpuDeviceCreateBindGroup failed") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_BUFFER_FAILED, "wgpuDeviceCreateBuffer() failed") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_TEXTURE_FAILED, "wgpuDeviceCreateTexture() failed") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_TEXTURE_VIEW_FAILED, "wgpuTextureCreateView() failed") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_SAMPLER_FAILED, "wgpuDeviceCreateSampler() failed") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_SHADER_MODULE_FAILED, "wgpuDeviceCreateShaderModule() failed") \
    _SG_LOGITEM_XMACRO(WGPU_SHADER_CREATE_BINDGROUP_LAYOUT_FAILED, "wgpuDeviceCreateBindGroupLayout() for shader stage failed") \
    _SG_LOGITEM_XMACRO(WGPU_UNIFORMBLOCK_WGSL_GROUP0_BINDING_OUT_OF_RANGE, "uniform block 'wgsl_group0_binding_n' is out of range (must be 0..15)") \
    _SG_LOGITEM_XMACRO(WGPU_TEXTURE_WGSL_GROUP1_BINDING_OUT_OF_RANGE, "texture 'wgsl_group1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(WGPU_STORAGEBUFFER_WGSL_GROUP1_BINDING_OUT_OF_RANGE, "storage buffer 'wgsl_group1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(WGPU_STORAGEIMAGE_WGSL_GROUP1_BINDING_OUT_OF_RANGE, "storage image 'wgsl_group1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(WGPU_SAMPLER_WGSL_GROUP1_BINDING_OUT_OF_RANGE, "sampler 'wgsl_group1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_PIPELINE_LAYOUT_FAILED, "wgpuDeviceCreatePipelineLayout() failed") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_RENDER_PIPELINE_FAILED, "wgpuDeviceCreateRenderPipeline() failed") \
    _SG_LOGITEM_XMACRO(WGPU_CREATE_COMPUTE_PIPELINE_FAILED, "wgpuDeviceCreateComputePipeline() failed") \
    _SG_LOGITEM_XMACRO(VULKAN_REQUIRED_EXTENSION_FUNCTION_MISSING, "vulkan: could not look up a required extension function pointer") \
    _SG_LOGITEM_XMACRO(VULKAN_ALLOC_DEVICE_MEMORY_NO_SUITABLE_MEMORY_TYPE, "vulkan: could not find suitable memory type") \
    _SG_LOGITEM_XMACRO(VULKAN_ALLOCATE_MEMORY_FAILED, "vulkan: vkAllocateMemory() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_ALLOC_BUFFER_DEVICE_MEMORY_FAILED, "vulkan: allocating buffer device memory failed") \
    _SG_LOGITEM_XMACRO(VULKAN_ALLOC_IMAGE_DEVICE_MEMORY_FAILED, "vulkan: allocating image device memory failed") \
    _SG_LOGITEM_XMACRO(VULKAN_DELETE_QUEUE_EXHAUSTED, "vulkan: internal delete queue exhausted (too many objects destroyed per frame)") \
    _SG_LOGITEM_XMACRO(VULKAN_STAGING_CREATE_BUFFER_FAILED, "vulkan: vkCreateBuffer() failed for staging buffer") \
    _SG_LOGITEM_XMACRO(VULKAN_STAGING_ALLOCATE_MEMORY_FAILED, "vulkan: allocating device memory for staging buffer failed") \
    _SG_LOGITEM_XMACRO(VULKAN_STAGING_BIND_BUFFER_MEMORY_FAILED, "vulkan: vkBindBufferMemory() failed for staging buffer") \
    _SG_LOGITEM_XMACRO(VULKAN_STAGING_STREAM_BUFFER_OVERFLOW, "vulkan: per-frame stream staging buffer has overflown (sg_desc.vulkan.stream_staging_buffer_size)") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_SHARED_BUFFER_FAILED, "vulkan: vkCreateBuffer() failed for cpu/gpu-shared buffer") \
    _SG_LOGITEM_XMACRO(VULKAN_ALLOCATE_SHARED_BUFFER_MEMORY_FAILED, "vulkan: allocating device memory for cpu/gpu-shared buffer failed") \
    _SG_LOGITEM_XMACRO(VULKAN_BIND_SHARED_BUFFER_MEMORY_FAILED, "vulkan: vkBindBufferMemory() failed for cpu/gpu-shared buffer") \
    _SG_LOGITEM_XMACRO(VULKAN_MAP_SHARED_BUFFER_MEMORY_FAILED, "vulkan: vkMapMemory() failed on cpu/gpu-shared buffer") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_BUFFER_FAILED, "vulkan: vkCreateBuffer() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_BIND_BUFFER_MEMORY_FAILED, "vulkan: vkBindBufferMemory() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_IMAGE_FAILED, "vulkan: vkCreateImage() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_BIND_IMAGE_MEMORY_FAILED, "vulkan: vkBindImageMemory() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_SHADER_MODULE_FAILED, "vulkan: vkCreateShaderModule() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_UNIFORMBLOCK_SPIRV_SET0_BINDING_OUT_OF_RANGE, "vulkan: uniform block 'spirv_set0_binding_n' is out of range (must be 0..15)") \
    _SG_LOGITEM_XMACRO(VULKAN_TEXTURE_SPIRV_SET1_BINDING_OUT_OF_RANGE, "vulkan: texture 'spirv_set1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(VULKAN_STORAGEBUFFER_SPIRV_SET1_BINDING_OUT_OF_RANGE, "vulkan: storage buffer 'spirv_set1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(VULKAN_STORAGEIMAGE_SPIRV_SET1_BINDING_OUT_OF_RANGE, "vulkan: storage image 'spirv_set1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(VULKAN_SAMPLER_SPIRV_SET1_BINDING_OUT_OF_RANGE, "vulkan: sampler 'spirv_set1_binding_n' is out of range (must be 0..127)") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_DESCRIPTOR_SET_LAYOUT_FAILED, "vulkan: vkCreateDescriptorSetLayout() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_PIPELINE_LAYOUT_FAILED, "vulkan: vkCreatePipelineLayout() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_GRAPHICS_PIPELINE_FAILED, "vulkan: vkCreateGraphicsPipelines() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_COMPUTE_PIPELINE_FAILED, "vulkan: vkCreateComputePipelines() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_IMAGE_VIEW_FAILED, "vulkan: vkCreateImageView() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_VIEW_MAX_DESCRIPTOR_SIZE, "vulkan: required view descriptor size is greater than _SG_VK_MAX_DESCRIPTOR_DATA_SIZE") \
    _SG_LOGITEM_XMACRO(VULKAN_CREATE_SAMPLER_FAILED, "vulkan: vkCreateSampler() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_SAMPLER_MAX_DESCRIPTOR_SIZE, "vulkan: required sampler descriptor size is greater than _SG_VK_MAX_DESCRIPTOR_DATA_SIZE") \
    _SG_LOGITEM_XMACRO(VULKAN_WAIT_FOR_FENCE_FAILED, "vulkan: vkWaitForFences() failed!") \
    _SG_LOGITEM_XMACRO(VULKAN_UNIFORM_BUFFER_OVERFLOW, "vulkan: uniform buffer has overflown (increase sg_desc.uniform_buffer_size)") \
    _SG_LOGITEM_XMACRO(VULKAN_DESCRIPTOR_BUFFER_OVERFLOW, "vulkan: descriptor buffer has overflown (increase sg_desc.vulkan.descriptor_buffer_size)") \
    _SG_LOGITEM_XMACRO(IDENTICAL_COMMIT_LISTENER, "attempting to add identical commit listener") \
    _SG_LOGITEM_XMACRO(COMMIT_LISTENER_ARRAY_FULL, "commit listener array full") \
    _SG_LOGITEM_XMACRO(TRACE_HOOKS_NOT_ENABLED, "sg_install_trace_hooks() called, but SOKOL_TRACE_HOOKS is not defined") \
    _SG_LOGITEM_XMACRO(DEALLOC_BUFFER_INVALID_STATE, "sg_dealloc_buffer(): buffer must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(DEALLOC_IMAGE_INVALID_STATE, "sg_dealloc_image(): image must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(DEALLOC_SAMPLER_INVALID_STATE, "sg_dealloc_sampler(): sampler must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(DEALLOC_SHADER_INVALID_STATE, "sg_dealloc_shader(): shader must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(DEALLOC_PIPELINE_INVALID_STATE, "sg_dealloc_pipeline(): pipeline must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(DEALLOC_VIEW_INVALID_STATE, "sg_dealloc_view(): view must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(INIT_BUFFER_INVALID_STATE, "sg_init_buffer(): buffer must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(INIT_IMAGE_INVALID_STATE, "sg_init_image(): image must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(INIT_SAMPLER_INVALID_STATE, "sg_init_sampler(): sampler must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(INIT_SHADER_INVALID_STATE, "sg_init_shader(): shader must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(INIT_PIPELINE_INVALID_STATE, "sg_init_pipeline(): pipeline must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(INIT_VIEW_INVALID_STATE, "sg_init_view(): view must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(UNINIT_BUFFER_INVALID_STATE, "sg_uninit_buffer(): buffer must be in VALID, FAILED or ALLOC state") \
    _SG_LOGITEM_XMACRO(UNINIT_IMAGE_INVALID_STATE, "sg_uninit_image(): image must be in VALID, FAILED or ALLOC state") \
    _SG_LOGITEM_XMACRO(UNINIT_SAMPLER_INVALID_STATE, "sg_uninit_sampler(): sampler must be in VALID, FAILED or ALLOC state") \
    _SG_LOGITEM_XMACRO(UNINIT_SHADER_INVALID_STATE, "sg_uninit_shader(): shader must be in VALID, FAILED or ALLOC state") \
    _SG_LOGITEM_XMACRO(UNINIT_PIPELINE_INVALID_STATE, "sg_uninit_pipeline(): pipeline must be in VALID, FAILED or ALLOC state") \
    _SG_LOGITEM_XMACRO(UNINIT_VIEW_INVALID_STATE, "sg_uninit_view(): view must be in VALID, FAILED or ALLOC state") \
    _SG_LOGITEM_XMACRO(FAIL_BUFFER_INVALID_STATE, "sg_fail_buffer(): buffer must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(FAIL_IMAGE_INVALID_STATE, "sg_fail_image(): image must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(FAIL_SAMPLER_INVALID_STATE, "sg_fail_sampler(): sampler must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(FAIL_SHADER_INVALID_STATE, "sg_fail_shader(): shader must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(FAIL_PIPELINE_INVALID_STATE, "sg_fail_pipeline(): pipeline must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(FAIL_VIEW_INVALID_STATE, "sg_fail_view(): view must be in ALLOC state") \
    _SG_LOGITEM_XMACRO(BUFFER_POOL_EXHAUSTED, "buffer pool exhausted") \
    _SG_LOGITEM_XMACRO(IMAGE_POOL_EXHAUSTED, "image pool exhausted") \
    _SG_LOGITEM_XMACRO(SAMPLER_POOL_EXHAUSTED, "sampler pool exhausted") \
    _SG_LOGITEM_XMACRO(SHADER_POOL_EXHAUSTED, "shader pool exhausted") \
    _SG_LOGITEM_XMACRO(PIPELINE_POOL_EXHAUSTED, "pipeline pool exhausted") \
    _SG_LOGITEM_XMACRO(VIEW_POOL_EXHAUSTED, "view pool exhausted") \
    _SG_LOGITEM_XMACRO(BEGINPASS_TOO_MANY_COLOR_ATTACHMENTS, "sg_begin_pass: too many color attachments (sg_limits.max_color_attachments)") \
    _SG_LOGITEM_XMACRO(BEGINPASS_TOO_MANY_RESOLVE_ATTACHMENTS, "sg_begin_pass: too many resolve attachments (sg_limits.max_color_attachments)") \
    _SG_LOGITEM_XMACRO(BEGINPASS_ATTACHMENTS_ALIVE, "sg_begin_pass: an attachment was provided that no longer exists") \
    _SG_LOGITEM_XMACRO(DRAW_WITHOUT_BINDINGS, "attempting to draw without resource bindings") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_VERTEXSTAGE_TEXTURES, "sg_shader_desc: too many texture bindings on vertex shader stage (sg_limits.max_texture_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_TEXTURES, "sg_shader_desc: too many texture bindings on fragment shader stage (sg_limits.max_texture_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_COMPUTESTAGE_TEXTURES, "sg_shader_desc: too many texture bindings on compute shader stage (sg_limits.max_texture_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_VERTEXSTAGE_STORAGEBUFFERS, "sg_shader_desc: too many storage buffer bindings on vertex shader stage (sg_limits.max_storage_buffer_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_STORAGEBUFFERS, "sg_shader_desc: too many storage buffer bindings on fragment shader stage (sg_limits.max_storage_buffer_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_COMPUTESTAGE_STORAGEBUFFERS, "sg_shader_desc: too many storage buffer bindings on compute shader stage (sg_limits.max_storage_buffer_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_VERTEXSTAGE_STORAGEIMAGES, "sg_shader_desc: too many storage image bindings on vertex shader stage (sg_limits.max_storage_image_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_STORAGEIMAGES, "sg_shader_desc: too many storage image bindings on fragment shader stage (sg_limits.max_storage_image_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_COMPUTESTAGE_STORAGEIMAGES, "sg_shader_desc: too many storage image bindings on compute shader stage (sg_limits.max_storage_image_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_VERTEXSTAGE_TEXTURESAMPLERPAIRS, "sg_shader_desc: too many texture-sampler-pairs on vertex shader stage (sg_limits.max_texture_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_TEXTURESAMPLERPAIRS, "sg_shader_desc: too many texture-sampler-pairs on fragment shader stage (sg_limits.max_texture_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(SHADERDESC_TOO_MANY_COMPUTESTAGE_TEXTURESAMPLERPAIRS, "sg_shader_desc: too many texture-sampler-pairs on compute shader stage (sg_limits.max_texture_bindings_per_stage)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_CANARY, "sg_buffer_desc not initialized") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_IMMUTABLE_DYNAMIC_STREAM, "sg_buffer_desc.usage: only one of .immutable, .dynamic_update, .stream_update can be true") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_SEPARATE_BUFFER_TYPES, "sg_buffer_desc.usage: on WebGL2, only one of .vertex_buffer or .index_buffer can be true (check sg_features.separate_buffer_types)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_EXPECT_NONZERO_SIZE, "sg_buffer_desc.size must be greater than zero") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_EXPECT_MATCHING_DATA_SIZE, "sg_buffer_desc.size and .data.size must be equal") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_EXPECT_ZERO_DATA_SIZE, "sg_buffer_desc.data.size expected to be zero") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_EXPECT_NO_DATA, "sg_buffer_desc.data.ptr must be null for dynamic/stream buffers") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_EXPECT_DATA, "sg_buffer_desc: initial content data must be provided for immutable buffers without storage buffer usage") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_STORAGEBUFFER_SUPPORTED, "storage buffers not supported by the backend 3D API (requires OpenGL >= 4.3)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BUFFERDESC_STORAGEBUFFER_SIZE_MULTIPLE_4, "size of storage buffers must be a multiple of 4") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDATA_NODATA, "sg_image_data: no data (.ptr and/or .size is zero)") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDATA_DATA_SIZE, "sg_image_data: data size doesn't match expected surface size") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_CANARY, "sg_image_desc not initialized") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_IMMUTABLE_DYNAMIC_STREAM, "sg_image_desc.usage: only one of .immutable, .dynamic_update, .stream_update can be true") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_IMAGETYPE_2D_NUMSLICES, "sg_image_desc.num_slices must be exactly 1 for SG_IMAGETYPE_2D") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_IMAGETYPE_CUBE_NUMSLICES, "sg_image_desc.num_slices must be exactly 6 for SG_IMAGETYPE_CUBE") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_IMAGETYPE_ARRAY_NUMSLICES, "sg_image_desc.num_slices must be ((>= 1) && (<= sg_limits.max_image_array_layers)) for SG_IMAGETYPE_ARRAY") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_IMAGETYPE_3D_NUMSLICES, "sg_image_desc.num_slices must be ((>= 1) && (<= sg_limits.max_image_size_3d)) for SG_IMAGETYPE_3D") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_NUMSLICES, "sg_image_desc.num_slices must be > 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_WIDTH, "sg_image_desc.width must be > 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_HEIGHT, "sg_image_desc.height must be > 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_NONRT_PIXELFORMAT, "invalid pixel format for non-render-target image") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_MSAA_BUT_NO_ATTACHMENT, "non-attachment images cannot be multisampled") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_DEPTH_3D_IMAGE, "3D images cannot have a depth/stencil image format") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_EXPECT_IMMUTABLE, "attachment and storage images must be sg_image_usage.immutable") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_EXPECT_NO_DATA, "render/storage attachment images cannot be initialized with data") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_PIXELFORMAT, "invalid pixel format for render attachment image") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_RESOLVE_EXPECT_NO_MSAA, "resolve attachment images cannot be multisampled") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_NO_MSAA_SUPPORT, "multisampling not supported for this pixel format") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_NUM_MIPMAPS, "multisample images must have num_mipmaps == 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_3D_IMAGE, "3D images cannot have a sample_count > 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_CUBE_IMAGE, "cube images cannot have sample_count > 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_ARRAY_IMAGE, "array images cannot have sample_count > 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_STORAGEIMAGE_PIXELFORMAT, "invalid pixel format for storage image") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_STORAGEIMAGE_EXPECT_NO_MSAA, "storage images cannot be multisampled") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_INJECTED_NO_DATA, "images with injected textures cannot be initialized with data") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_DYNAMIC_NO_DATA, "dynamic/stream-update images cannot be initialized with data") \
    _SG_LOGITEM_XMACRO(VALIDATE_IMAGEDESC_COMPRESSED_IMMUTABLE, "compressed images must be immutable") \
    _SG_LOGITEM_XMACRO(VALIDATE_SAMPLERDESC_CANARY, "sg_sampler_desc not initialized") \
    _SG_LOGITEM_XMACRO(VALIDATE_SAMPLERDESC_ANISTROPIC_REQUIRES_LINEAR_FILTERING, "sg_sampler_desc.max_anisotropy > 1 requires min/mag/mipmap_filter to be SG_FILTER_LINEAR") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_CANARY, "sg_shader_desc not initialized") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VERTEX_SOURCE, "vertex shader source code expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_FRAGMENT_SOURCE, "fragment shader source code expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_COMPUTE_SOURCE, "compute shader source code expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VERTEX_SOURCE_OR_BYTECODE, "vertex shader source or byte code expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_FRAGMENT_SOURCE_OR_BYTECODE, "fragment shader source or byte code expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_COMPUTE_SOURCE_OR_BYTECODE, "compute shader source or byte code expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_INVALID_SHADER_COMBO, "cannot combine compute shaders with vertex or fragment shaders") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_NO_BYTECODE_SIZE, "shader byte code length (in bytes) required") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_METAL_THREADS_PER_THREADGROUP_INITIALIZED, "sg_shader_desc.mtl_threads_per_threadgroup must be initialized for compute shaders (metal)") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_METAL_THREADS_PER_THREADGROUP_MULTIPLE_32, "sg_shader_desc.mtl_threads_per_threadgroup (x * y * z) must be a multiple of 32 (metal)") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_NO_CONT_MEMBERS, "sg_shader_desc.uniform_blocks[].glsl_uniforms[]: items must occupy continuous slots") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_SIZE_IS_ZERO, "sg_shader_desc.uniform_blocks[].size cannot be zero") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_METAL_BUFFER_SLOT_COLLISION, "sg_shader_desc.uniform_blocks[].msl_buffer_n must be unique across uniform blocks and storage buffers in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_HLSL_REGISTER_B_COLLISION, "sg_shader_desc.uniform_blocks[].hlsl_register_b_n must be unique across uniform blocks in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_WGSL_GROUP0_BINDING_COLLISION, "sg_shader_desc.uniform_blocks[].wgsl_group0_binding_n must be unique across all uniform blocks") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_SPIRV_SET0_BINDING_COLLISION, "sg_shader_desc.uniform_blocks[].spirv_set0_binding_n must be unique across all uniform blocks") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_NO_MEMBERS, "sg_shader_desc.uniform_blocks[].glsl_uniforms[]: GL backend requires uniform block member declarations") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_UNIFORM_GLSL_NAME, "sg_shader_desc.uniform_blocks[].glsl_uniforms[].glsl_name missing") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_SIZE_MISMATCH, "sg_shader_desc.uniform_blocks[].glsl_uniforms[]: size of uniform block members doesn't match uniform block size") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_ARRAY_COUNT, "sg_shader_desc.uniform_blocks[].glsl_uniforms[].array_count must be >= 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_UNIFORMBLOCK_STD140_ARRAY_TYPE, "sg_shader_desc.uniform_blocks[].glsl_uniforms[].type: uniform arrays only allowed for FLOAT4, INT4, MAT4 in std140 layout") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_METAL_BUFFER_SLOT_COLLISION, "sg_shader_desc.views[].storage_buffer.msl_buffer_n must be unique across uniform blocks and storage buffers in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_HLSL_REGISTER_T_COLLISION, "sg_shader_desc.views[].storage_buffer.hlsl_register_t_n must be unique across read-only storage buffers and images in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_HLSL_REGISTER_U_COLLISION, "sg_shader_desc.views[].storage_buffer.hlsl_register_u_n must be unique across read/write storage buffers and storage images in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_GLSL_BINDING_COLLISION, "sg_shader_desc.views[].storage_buffer.glsl_binding_n must be unique across shader stages") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_WGSL_GROUP1_BINDING_COLLISION, "sg_shader_desc.views[].storage_buffer.wgsl_group1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_SPIRV_SET1_BINDING_COLLISION, "sg_shader_desc.views[].storage_buffer.spirv_set1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_EXPECT_COMPUTE_STAGE, "sg_shader_desc.views[].storage_image: storage images are only allowed on the compute stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_METAL_TEXTURE_SLOT_COLLISION, "sg_shader_desc.views[].storage_image.msl_texture_n must be unique across images and storage images in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_HLSL_REGISTER_U_COLLISION, "sg_shader_desc.views[].storage_image.hlsl_register_u_n must be unique across storage images and read/write storage buffers in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_GLSL_BINDING_COLLISION, "sg_shader_desc.views[].storage_image.glsl_binding_n must be unique across shader stages") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_WGSL_GROUP1_BINDING_COLLISION, "sg_shader_desc.views[].storage_image.wgsl_group1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_SPIRV_SET1_BINDING_COLLISION, "sg_shader_desc.views[].storage_image.spirv_set1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_TEXTURE_METAL_TEXTURE_SLOT_COLLISION, "sg_shader_desc.views[].texture.msl_texture_n must be unique across textures and storage images in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_TEXTURE_HLSL_REGISTER_T_COLLISION, "sg_shader_desc.views[].texture.hlsl_register_t_n must be unique across textures and storage buffers in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_TEXTURE_WGSL_GROUP1_BINDING_COLLISION, "sg_shader_desc.views[].texture.wgsl_group1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_VIEW_TEXTURE_SPIRV_SET1_BINDING_COLLISION, "sg_shader_desc.views[].texture.spirv_set1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_SAMPLER_METAL_SAMPLER_SLOT_COLLISION, "sg_shader_desc.samplers[].msl_sampler_n must be unique in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_SAMPLER_HLSL_REGISTER_S_COLLISION, "sg_shader_desc.samplers[].hlsl_register_s_n must be unique in same shader stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_SAMPLER_WGSL_GROUP1_BINDING_COLLISION, "sg_shader_desc.samplers[].wgsl_group1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_SAMPLER_SPIRV_SET1_BINDING_COLLISION, "sg_shader_desc.samplers[].spirv_set1_binding_n must be unique across all view and sampler bindings") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_VIEW_SLOT_OUT_OF_RANGE, "texture-sampler-pair view slot index is out of range (sg_shader_desc.texture_sampler_pairs[].view_slot)") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_SAMPLER_SLOT_OUT_OF_RANGE, "texture-sampler-pair sampler slot index is out of range (sg_shader_desc.texture_sampler_pairs[].sampler_slot)") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_TEXTURE_STAGE_MISMATCH, "texture-sampler-pair stage doesn't match referenced texture stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_EXPECT_TEXTURE_VIEW, "texture-sampler-pair view must be a texture view (sg_shader_desc.texture_sampler_pairs[].view_slot => sg_shader_desc.views[i].texture)") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_SAMPLER_STAGE_MISMATCH, "texture-sampler-pair stage doesn't match referenced sampler stage") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_GLSL_NAME, "texture-sampler-pair 'glsl_name' missing") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_NONFILTERING_SAMPLER_REQUIRED, "image sample type UNFILTERABLE_FLOAT, UINT, SINT can only be used with NONFILTERING sampler") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_COMPARISON_SAMPLER_REQUIRED, "image sample type DEPTH can only be used with COMPARISON sampler") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_TEXVIEW_NOT_REFERENCED_BY_TEXTURE_SAMPLER_PAIRS, "one or more texture views are not referenced by texture-sampler-pairs (sg_shader_desc.texture_sampler_pairs[].view_slot)") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_SAMPLER_NOT_REFERENCED_BY_TEXTURE_SAMPLER_PAIRS, "one or more samplers are not referenced by texture-sampler-pairs (sg_shader_desc.texture_sampler_pairs[].sampler_slot)") \
    _SG_LOGITEM_XMACRO(VALIDATE_SHADERDESC_ATTR_STRING_TOO_LONG, "vertex attribute name/semantic string too long (max len 16)") \
  3858. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_CANARY, "sg_pipeline_desc not initialized") \
  3859. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_SHADER, "sg_pipeline_desc.shader missing or invalid") \
  3860. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_COMPUTE_SHADER_EXPECTED, "sg_pipeline_desc.shader must be a compute shader") \
  3861. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_NO_COMPUTE_SHADER_EXPECTED, "sg_pipeline_desc.compute is false, but shader is a compute shader") \
  3862. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_NO_CONT_ATTRS, "sg_pipeline_desc.layout.attrs is not continuous") \
  3863. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_ATTR_BASETYPE_MISMATCH, "sg_pipeline_desc.layout.attrs[].format is incompatible with sg_shader_desc.attrs[].base_type") \
  3864. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_LAYOUT_STRIDE4, "sg_pipeline_desc.layout.buffers[].stride must be multiple of 4") \
  3865. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_ATTR_SEMANTICS, "D3D11 missing vertex attribute semantics in shader") \
  3866. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_SHADER_READONLY_STORAGEBUFFERS, "sg_pipeline_desc.shader: only readonly storage buffer bindings allowed in render pipelines") \
  3867. _SG_LOGITEM_XMACRO(VALIDATE_PIPELINEDESC_BLENDOP_MINMAX_REQUIRES_BLENDFACTOR_ONE, "SG_BLENDOP_MIN/MAX requires all blend factors to be SG_BLENDFACTOR_ONE") \
  3868. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_CANARY, "sg_view_desc not initialized") \
  3869. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_UNIQUE_VIEWTYPE, "sg_view_desc: only one view type can be active") \
  3870. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_ANY_VIEWTYPE, "sg_view_desc: exactly one view type must be active") \
  3871. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_RESOURCE_ALIVE, "sg_view_desc: resource object is no longer alive (.buffer or .image)") \
  3872. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_RESOURCE_FAILED, "sg_view_desc: resource object cannot be in FAILED state (.buffer or .image)") \
  3873. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_STORAGEBUFFER_OFFSET_VS_BUFFER_SIZE, "sg_view_desc.storage_buffer.offset is >= buffer size") \
  3874. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_STORAGEBUFFER_OFFSET_MULTIPLE_256, "sg_view_desc.storage_buffer.offset must be a multiple of 256") \
  3875. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_STORAGEBUFFER_USAGE, "sg_view_desc.storage_buffer.buffer must have been created with sg_buffer_desc.usage.storage_buffer = true") \
  3876. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_STORAGEIMAGE_USAGE, "sg_view_desc.storage_image.image must have been created with sg_image_desc.usage.storage_image = true") \
  3877. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_COLORATTACHMENT_USAGE, "sg_view_desc.color_attachment.image must have been created with sg_image_desc.usage.color_attachment = true") \
  3878. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_RESOLVEATTACHMENT_USAGE, "sg_view_desc.resolve_attachment.image must have been created with sg_image_desc.usage.resolve_attachment = true") \
  3879. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_DEPTHSTENCILATTACHMENT_USAGE, "sg_view_desc.depth_stencil_attachment.image must have been created with sg_image_desc.usage.depth_stencil_attachment = true") \
  3880. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_IMAGE_MIPLEVEL, "sg_view_desc: image/attachment view mip level is out of range (must be >=0 and <image.num_miplevels)") \
  3881. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_IMAGE_2D_SLICE, "sg_view_desc: image/attachment view slice is out of range for 2D image (must be 0)") \
  3882. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_IMAGE_CUBEMAP_SLICE, "sg_view_desc: image/attachment view slice is out of range for cubemap image (must be >=0 and <6)") \
  3883. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_IMAGE_ARRAY_SLICE, "sg_view_desc: image/attachment view slice is out of range for 2D array image (must be >=0 and <image.num_slices") \
  3884. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_IMAGE_3D_SLICE, "sg_view_desc: image/attachment view slice is out of range for 3D image (must be 0)") \
  3885. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_TEXTURE_EXPECT_NO_MSAA, "sg_view_desc: MSAA texture bindings not allowed on this backend (sg_features.msaa_texture_bindings)") \
  3886. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_TEXTURE_MIPLEVELS, "sg_view_desc: texture view mip levels are out of range (must be >=0 and <image.num_miplevels)") \
  3887. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_TEXTURE_2D_SLICES, "sg_view_desc: texture view slices are out of range for 2D image (must be 0)") \
  3888. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_TEXTURE_CUBEMAP_SLICES, "sg_view_desc: texture view slices are out of range for cubemap image (must be 0)") \
  3889. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_TEXTURE_ARRAY_SLICES, "sg_view_desc: texture view slices are out of range for 2D array image (must be >=0 and <image.num_slices") \
  3890. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_TEXTURE_3D_SLICES, "sg_view_desc: texture view slices are out of range for 3D image (must be 0)") \
  3891. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_STORAGEIMAGE_PIXELFORMAT, "sg_view_desc.storage_image_binding: image pixel format must be GPU readable or writable (sg_pixelformat_info.read/write)") \
  3892. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_COLORATTACHMENT_PIXELFORMAT, "sg_view_desc.color_attachment: pixel format of image must be renderable (sg_pixelformat_info.render)") \
  3893. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_DEPTHSTENCILATTACHMENT_PIXELFORMAT, "sg_view_desc.depth_stencil_attachment: pixel format of image must be a depth or depth-stencil format (sg_pixelformat_info.depth)") \
  3894. _SG_LOGITEM_XMACRO(VALIDATE_VIEWDESC_RESOLVEATTACHMENT_SAMPLECOUNT, "sg_view_desc.resolve_attachment: image cannot be multisampled") \
  3895. _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_CANARY, "sg_begin_pass: pass struct not initialized") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COMPUTEPASS_EXPECT_NO_ATTACHMENTS, "sg_begin_pass: compute passes cannot have attachments") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_WIDTH, "sg_begin_pass: expected pass.swapchain.width > 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_WIDTH_NOTSET, "sg_begin_pass: expected pass.swapchain.width == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_HEIGHT, "sg_begin_pass: expected pass.swapchain.height > 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_HEIGHT_NOTSET, "sg_begin_pass: expected pass.swapchain.height == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_SAMPLECOUNT, "sg_begin_pass: expected pass.swapchain.sample_count > 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_SAMPLECOUNT_NOTSET, "sg_begin_pass: expected pass.swapchain.sample_count == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_COLORFORMAT, "sg_begin_pass: expected pass.swapchain.color_format to be valid") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_COLORFORMAT_NOTSET, "sg_begin_pass: expected pass.swapchain.color_format to be unset") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_DEPTHFORMAT_NOTSET, "sg_begin_pass: expected pass.swapchain.depth_format to be unset") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_CURRENTDRAWABLE, "sg_begin_pass: expected pass.swapchain.metal.current_drawable != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_CURRENTDRAWABLE_NOTSET, "sg_begin_pass: expected pass.swapchain.metal.current_drawable == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_DEPTHSTENCILTEXTURE, "sg_begin_pass: expected pass.swapchain.metal.depth_stencil_texture != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_DEPTHSTENCILTEXTURE_NOTSET, "sg_begin_pass: expected pass.swapchain.metal.depth_stencil_texture == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_MSAACOLORTEXTURE, "sg_begin_pass: expected pass.swapchain.metal.msaa_color_texture != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_MSAACOLORTEXTURE_NOTSET, "sg_begin_pass: expected pass.swapchain.metal.msaa_color_texture == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RENDERVIEW, "sg_begin_pass: expected pass.swapchain.d3d11.render_view != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RENDERVIEW_NOTSET, "sg_begin_pass: expected pass.swapchain.d3d11.render_view == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RESOLVEVIEW, "sg_begin_pass: expected pass.swapchain.d3d11.resolve_view != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RESOLVEVIEW_NOTSET, "sg_begin_pass: expected pass.swapchain.d3d11.resolve_view == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_DEPTHSTENCILVIEW, "sg_begin_pass: expected pass.swapchain.d3d11.depth_stencil_view != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_DEPTHSTENCILVIEW_NOTSET, "sg_begin_pass: expected pass.swapchain.d3d11.depth_stencil_view == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RENDERVIEW, "sg_begin_pass: expected pass.swapchain.wgpu.render_view != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RENDERVIEW_NOTSET, "sg_begin_pass: expected pass.swapchain.wgpu.render_view == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RESOLVEVIEW, "sg_begin_pass: expected pass.swapchain.wgpu.resolve_view != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RESOLVEVIEW_NOTSET, "sg_begin_pass: expected pass.swapchain.wgpu.resolve_view == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_DEPTHSTENCILVIEW, "sg_begin_pass: expected pass.swapchain.wgpu.depth_stencil_view != 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_DEPTHSTENCILVIEW_NOTSET, "sg_begin_pass: expected pass.swapchain.wgpu.depth_stencil_view == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_SWAPCHAIN_GL_EXPECT_FRAMEBUFFER_NOTSET, "sg_begin_pass: expected pass.swapchain.gl.framebuffer == 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEWS_CONTINUOUS, "sg_begin_pass: color attachment view array must be continuous") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_ALIVE, "sg_begin_pass: color attachment view no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_VALID, "sg_begin_pass: color attachment view not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_TYPE, "sg_begin_pass: color attachment view has wrong type (must be sg_view_desc.color_attachment)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_IMAGE_ALIVE, "sg_begin_pass: color attachment view's image object is uninitialized or no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_IMAGE_VALID, "sg_begin_pass: color attachment view's image is not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_SIZES, "sg_begin_pass: all color attachments must have the same width and height") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_SAMPLECOUNT, "sg_begin_pass: when resolve attachments are provided, the color attachment sample count must be > 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_SAMPLECOUNTS_EQUAL, "sg_begin_pass: all color attachments must have the same sample count") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_NO_COLORATTACHMENTVIEW, "sg_begin_pass: a resolve attachment view must have an associated color attachment view at the same index") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_ALIVE, "sg_begin_pass: resolve attachment view no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_VALID, "sg_begin_pass: resolve attachment view not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_TYPE, "sg_begin_pass: resolve attachment view has wrong type (must be sg_view_desc.resolve_attachment)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_IMAGE_ALIVE, "sg_begin_pass: resolve attachment view's image object is uninitialized or no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_IMAGE_VALID, "sg_begin_pass: resolve attachment view's image is not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_SIZES, "sg_begin_pass: all attachments must have the same width and height") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEWS_CONTINUOUS, "sg_begin_pass: depth-stencil attachment view array must be continuous") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_ALIVE, "sg_begin_pass: depth-stencil attachment view no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_VALID, "sg_begin_pass: depth-stencil attachment view not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_TYPE, "sg_begin_pass: depth-stencil attachment view has wrong type (must be sg_view_desc.depth_stencil_attachment)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_IMAGE_ALIVE, "sg_begin_pass: depth-stencil attachment view's image object is uninitialized or no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_IMAGE_VALID, "sg_begin_pass: depth-stencil attachment view's image is not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_SIZES, "sg_begin_pass: attachments must have the same width and height") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_SAMPLECOUNT, "sg_begin_pass: depth-stencil attachment must have the same sample count as the color attachments") \
    _SG_LOGITEM_XMACRO(VALIDATE_BEGINPASS_ATTACHMENTS_EXPECTED, "sg_begin_pass: offscreen render passes must have at least one color- or depth-stencil attachment") \
    _SG_LOGITEM_XMACRO(VALIDATE_AVP_RENDERPASS_EXPECTED, "sg_apply_viewport: must be called in a render pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_ASR_RENDERPASS_EXPECTED, "sg_apply_scissor_rect: must be called in a render pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_PIPELINE_VALID_ID, "sg_apply_pipeline: invalid pipeline id provided") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_PIPELINE_EXISTS, "sg_apply_pipeline: pipeline object no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_PIPELINE_VALID, "sg_apply_pipeline: pipeline object not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_PASS_EXPECTED, "sg_apply_pipeline: must be called in a pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_PIPELINE_SHADER_ALIVE, "sg_apply_pipeline: shader object associated with pipeline no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_PIPELINE_SHADER_VALID, "sg_apply_pipeline: shader object associated with pipeline not in valid state") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_COMPUTEPASS_EXPECTED, "sg_apply_pipeline: trying to apply compute pipeline in render pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_RENDERPASS_EXPECTED, "sg_apply_pipeline: trying to apply render pipeline in compute pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_SWAPCHAIN_COLOR_COUNT, "sg_apply_pipeline: the pipeline .color_count must be 1 in swapchain render passes") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_SWAPCHAIN_COLOR_FORMAT, "sg_apply_pipeline: the pipeline .colors[0].pixel_format doesn't match the sg_pass.swapchain.color_format") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_SWAPCHAIN_DEPTH_FORMAT, "sg_apply_pipeline: the pipeline .depth.pixel_format doesn't match the sg_pass.swapchain.depth_format") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_SWAPCHAIN_SAMPLE_COUNT, "sg_apply_pipeline: the pipeline .sample_count doesn't match the sg_pass.swapchain.sample_count") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_ATTACHMENTS_ALIVE, "sg_apply_pipeline: at least one pass attachment view or base image object is no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_COLORATTACHMENTS_COUNT, "sg_apply_pipeline: the pipeline .color_count doesn't match the number of render pass color attachments") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_COLORATTACHMENTS_VIEW_VALID, "sg_apply_pipeline: a pass color attachment view is not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_COLORATTACHMENTS_IMAGE_VALID, "sg_apply_pipeline: a pass color attachment view's image object is not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_COLORATTACHMENTS_FORMAT, "sg_apply_pipeline: a pipeline .colors[n].pixel_format doesn't match sg_pass.attachments.colors[n] image pixel format") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_DEPTHSTENCILATTACHMENT_VIEW_VALID, "sg_apply_pipeline: the pass depth-stencil attachment view is not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_DEPTHSTENCILATTACHMENT_IMAGE_VALID, "sg_apply_pipeline: the pass depth-stencil attachment view's image object is not in valid state (SG_RESOURCESTATE_VALID)") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_DEPTHSTENCILATTACHMENT_FORMAT, "sg_apply_pipeline: pipeline .depth.pixel_format doesn't match sg_pass.attachments.depth_stencil image pixel format") \
    _SG_LOGITEM_XMACRO(VALIDATE_APIP_ATTACHMENT_SAMPLE_COUNT, "sg_apply_pipeline: pipeline MSAA sample count doesn't match pass attachment sample count") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_PASS_EXPECTED, "sg_apply_bindings: must be called in a pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EMPTY_BINDINGS, "sg_apply_bindings: the provided sg_bindings struct is empty") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_NO_PIPELINE, "sg_apply_bindings: must be called after sg_apply_pipeline") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_PIPELINE_ALIVE, "sg_apply_bindings: currently applied pipeline object no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_PIPELINE_VALID, "sg_apply_bindings: currently applied pipeline object not in valid state") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_PIPELINE_SHADER_ALIVE, "sg_apply_bindings: shader associated with currently applied pipeline is no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_PIPELINE_SHADER_VALID, "sg_apply_bindings: shader associated with currently applied pipeline is not in valid state") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_COMPUTE_EXPECTED_NO_VBUFS, "sg_apply_bindings: vertex buffer bindings not allowed in a compute pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_COMPUTE_EXPECTED_NO_IBUF, "sg_apply_bindings: index buffer binding not allowed in compute pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECTED_VBUF, "sg_apply_bindings: vertex buffer binding is missing or buffer handle is invalid") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_VBUF_ALIVE, "sg_apply_bindings: vertex buffer no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_VBUF_USAGE, "sg_apply_bindings: buffer in vertex buffer bind slot must have usage.vertex_buffer") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_VBUF_OVERFLOW, "sg_apply_bindings: buffer in vertex buffer bind slot is overflown") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECTED_NO_IBUF, "sg_apply_bindings: pipeline object defines non-indexed rendering, but index buffer binding provided") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECTED_IBUF, "sg_apply_bindings: pipeline object defines indexed rendering, but no index buffer binding provided") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_IBUF_ALIVE, "sg_apply_bindings: index buffer no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_IBUF_USAGE, "sg_apply_bindings: buffer in index buffer bind slot must have usage.index_buffer") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_IBUF_OVERFLOW, "sg_apply_bindings: buffer in index buffer slot is overflown") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECTED_VIEW_BINDING, "sg_apply_bindings: view binding is missing or the view handle is invalid") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_VIEW_ALIVE, "sg_apply_bindings: view no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECT_TEXVIEW, "sg_apply_bindings: view type mismatch in bindslot (shader expects a texture view)") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECT_SBVIEW, "sg_apply_bindings: view type mismatch in bindslot (shader expects a storage buffer view)") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECT_SIMGVIEW, "sg_apply_bindings: view type mismatch in bindslot (shader expects a storage image view)") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXVIEW_IMAGETYPE_MISMATCH, "sg_apply_bindings: image type of bound texture doesn't match shader desc") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXVIEW_EXPECTED_MULTISAMPLED_IMAGE, "sg_apply_bindings: texture binding expects image with sample_count > 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXVIEW_EXPECTED_NON_MULTISAMPLED_IMAGE, "sg_apply_bindings: texture binding expects image with sample_count == 1") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXVIEW_EXPECTED_FILTERABLE_IMAGE, "sg_apply_bindings: filterable image expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXVIEW_EXPECTED_DEPTH_IMAGE, "sg_apply_bindings: depth image expected") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_SBVIEW_READWRITE_IMMUTABLE, "sg_apply_bindings: storage buffers bound as read/write must have immutable usage") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_SIMGVIEW_COMPUTE_PASS_EXPECTED, "sg_apply_bindings: storage image bindings can only appear in compute passes") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_SIMGVIEW_IMAGETYPE_MISMATCH, "sg_apply_bindings: image type of bound storage image doesn't match shader desc") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_SIMGVIEW_ACCESSFORMAT, "sg_apply_bindings: pixel format of storage image view doesn't match access format in shader desc") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECTED_SAMPLER_BINDING, "sg_apply_bindings: sampler binding is missing or the sampler handle is invalid") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_UNEXPECTED_SAMPLER_COMPARE_NEVER, "sg_apply_bindings: shader expects SG_SAMPLERTYPE_COMPARISON but sampler has SG_COMPAREFUNC_NEVER") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECTED_SAMPLER_COMPARE_NEVER, "sg_apply_bindings: shader expects SG_SAMPLERTYPE_FILTERING or SG_SAMPLERTYPE_NONFILTERING but sampler doesn't have SG_COMPAREFUNC_NEVER") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_EXPECTED_NONFILTERING_SAMPLER, "sg_apply_bindings: shader expects SG_SAMPLERTYPE_NONFILTERING, but sampler has SG_FILTER_LINEAR filters") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_SAMPLER_ALIVE, "sg_apply_bindings: bound sampler no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_SAMPLER_VALID, "sg_apply_bindings: bound sampler not in valid state") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXTURE_BINDING_VS_DEPTHSTENCIL_ATTACHMENT, "sg_apply_bindings: cannot bind texture in the same pass it is used as depth-stencil attachment") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXTURE_BINDING_VS_COLOR_ATTACHMENT, "sg_apply_bindings: cannot bind texture in the same pass it is used as color attachment") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXTURE_BINDING_VS_RESOLVE_ATTACHMENT, "sg_apply_bindings: cannot bind texture in the same pass it is used as resolve attachment") \
    _SG_LOGITEM_XMACRO(VALIDATE_ABND_TEXTURE_VS_STORAGEIMAGE_BINDING, "sg_apply_bindings: an image cannot be bound as a texture and storage image at the same time") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_PASS_EXPECTED, "sg_apply_uniforms: must be called in a pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_NO_PIPELINE, "sg_apply_uniforms: must be called after sg_apply_pipeline()") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_PIPELINE_ALIVE, "sg_apply_uniforms: currently applied pipeline object no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_PIPELINE_VALID, "sg_apply_uniforms: currently applied pipeline object not in valid state") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_PIPELINE_SHADER_ALIVE, "sg_apply_uniforms: shader associated with currently applied pipeline is no longer alive") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_PIPELINE_SHADER_VALID, "sg_apply_uniforms: shader associated with currently applied pipeline is not in valid state") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_NO_UNIFORMBLOCK_AT_SLOT, "sg_apply_uniforms: no uniform block declaration at this shader stage UB slot") \
    _SG_LOGITEM_XMACRO(VALIDATE_AU_SIZE, "sg_apply_uniforms: data size doesn't match declared uniform block size") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_RENDERPASS_EXPECTED, "sg_draw: must be called in a render pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_BASEELEMENT_GE_ZERO, "sg_draw: base_element cannot be < 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_NUMELEMENTS_GE_ZERO, "sg_draw: num_elements cannot be < 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_NUMINSTANCES_GE_ZERO, "sg_draw: num_instances cannot be < 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_RENDERPASS_EXPECTED, "sg_draw_ex: must be called in a render pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_BASEELEMENT_GE_ZERO, "sg_draw_ex: base_element cannot be < 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_NUMELEMENTS_GE_ZERO, "sg_draw_ex: num_elements cannot be < 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_NUMINSTANCES_GE_ZERO, "sg_draw_ex: num_instances cannot be < 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_BASEINSTANCE_GE_ZERO, "sg_draw_ex: base_instance cannot be < 0") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_BASEVERTEX_VS_INDEXED, "sg_draw_ex: base_vertex must be == 0 for non-indexed rendering") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_BASEINSTANCE_VS_INSTANCED, "sg_draw_ex: base_instance must be == 0 for non-instanced rendering") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_BASEVERTEX_NOT_SUPPORTED, "sg_draw_ex: base_vertex != 0 not supported on this backend (sg_features.draw_base_vertex)") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_EX_BASEINSTANCE_NOT_SUPPORTED, "sg_draw_ex: base_instance > 0 not supported on this backend (sg_features.draw_base_instance)") \
    _SG_LOGITEM_XMACRO(VALIDATE_DRAW_REQUIRED_BINDINGS_OR_UNIFORMS_MISSING, "sg_draw: call to sg_apply_bindings() and/or sg_apply_uniforms() missing after sg_apply_pipeline()") \
    _SG_LOGITEM_XMACRO(VALIDATE_DISPATCH_COMPUTEPASS_EXPECTED, "sg_dispatch: must be called in a compute pass") \
    _SG_LOGITEM_XMACRO(VALIDATE_DISPATCH_NUMGROUPSX, "sg_dispatch: num_groups_x must be >=0 and <65536") \
    _SG_LOGITEM_XMACRO(VALIDATE_DISPATCH_NUMGROUPSY, "sg_dispatch: num_groups_y must be >=0 and <65536") \
    _SG_LOGITEM_XMACRO(VALIDATE_DISPATCH_NUMGROUPSZ, "sg_dispatch: num_groups_z must be >=0 and <65536") \
    _SG_LOGITEM_XMACRO(VALIDATE_DISPATCH_REQUIRED_BINDINGS_OR_UNIFORMS_MISSING, "sg_dispatch: call to sg_apply_bindings() and/or sg_apply_uniforms() missing after sg_apply_pipeline()") \
    _SG_LOGITEM_XMACRO(VALIDATE_UPDATEBUF_USAGE, "sg_update_buffer: cannot update immutable buffer") \
    _SG_LOGITEM_XMACRO(VALIDATE_UPDATEBUF_SIZE, "sg_update_buffer: update size is bigger than buffer size") \
    _SG_LOGITEM_XMACRO(VALIDATE_UPDATEBUF_ONCE, "sg_update_buffer: only one update allowed per buffer and frame") \
    _SG_LOGITEM_XMACRO(VALIDATE_UPDATEBUF_APPEND, "sg_update_buffer: cannot call sg_update_buffer and sg_append_buffer in same frame") \
    _SG_LOGITEM_XMACRO(VALIDATE_APPENDBUF_USAGE, "sg_append_buffer: cannot append to immutable buffer") \
    _SG_LOGITEM_XMACRO(VALIDATE_APPENDBUF_SIZE, "sg_append_buffer: overall appended size is bigger than buffer size") \
    _SG_LOGITEM_XMACRO(VALIDATE_APPENDBUF_UPDATE, "sg_append_buffer: cannot call sg_append_buffer and sg_update_buffer in same frame") \
    _SG_LOGITEM_XMACRO(VALIDATE_UPDIMG_USAGE, "sg_update_image: cannot update immutable image") \
    _SG_LOGITEM_XMACRO(VALIDATE_UPDIMG_ONCE, "sg_update_image: only one update allowed per image and frame") \
    _SG_LOGITEM_XMACRO(VALIDATION_FAILED, "validation layer checks failed") \

#define _SG_LOGITEM_XMACRO(item,msg) SG_LOGITEM_##item,
typedef enum sg_log_item {
    _SG_LOG_ITEMS
} sg_log_item;
#undef _SG_LOGITEM_XMACRO
/*
    sg_desc

    The sg_desc struct contains configuration values for sokol_gfx;
    it is used as the parameter of the sg_setup() call.

    The default configuration is:

    .buffer_pool_size       128
    .image_pool_size        128
    .sampler_pool_size      64
    .shader_pool_size       32
    .pipeline_pool_size     64
    .view_pool_size         256
    .uniform_buffer_size    4 MB (4*1024*1024)
    .max_commit_listeners   1024
    .disable_validation     false
    .metal.force_managed_storage_mode                       false
    .metal.use_command_buffer_with_retained_references      false
    .wgpu.disable_bindgroups_cache      false
    .wgpu.bindgroups_cache_size         1024
    .vulkan.copy_staging_buffer_size    4 MB
    .vulkan.stream_staging_buffer_size  16 MB
    .vulkan.descriptor_buffer_size      16 MB

    .allocator.alloc_fn     0 (in this case, malloc() will be called)
    .allocator.free_fn      0 (in this case, free() will be called)
    .allocator.user_data    0

    .environment.defaults.color_format: default value depends on selected backend:
        all GL backends:    SG_PIXELFORMAT_RGBA8
        Metal and D3D11:    SG_PIXELFORMAT_BGRA8
        WebGPU:             *no default* (must be queried from WebGPU swapchain object)
    .environment.defaults.depth_format: SG_PIXELFORMAT_DEPTH_STENCIL
    .environment.defaults.sample_count: 1

    Metal specific:
        (NOTE: All Objective-C object references are transferred through
        a bridged cast (__bridge const void*) to sokol_gfx, which will use an
        unretained bridged cast (__bridge id<xxx>) to retrieve the Objective-C
        references back. Since the bridge cast is unretained, the caller
        must hold a strong reference to the Objective-C object until sg_setup()
        returns.)

        .metal.force_managed_storage_mode
            when enabled, Metal buffers and texture resources are created in managed storage
            mode, otherwise sokol-gfx will decide whether to create buffers and
            textures in managed or shared storage mode (this is mainly a debugging option)
        .metal.use_command_buffer_with_retained_references
            when true, the sokol-gfx Metal backend will use Metal command buffers which
            bump the reference count of resource objects as long as they are in flight;
            this is slower than the default command-buffer-with-unretained-references
            method and may serve as a workaround when confronted with lifetime validation
            errors from the Metal validation layer until a proper fix has been implemented
        .environment.metal.device
            a pointer to the MTLDevice object

    D3D11 specific:
        .environment.d3d11.device
            a pointer to the ID3D11Device object, this must have been created
            before sg_setup() is called
        .environment.d3d11.device_context
            a pointer to the ID3D11DeviceContext object
        .d3d11.shader_debugging
            set this to true to compile shaders which are provided as HLSL source
            code with debug information and without optimization, this allows
            shader debugging in tools like RenderDoc; to output source code
            instead of byte code from sokol-shdc, omit the `--binary` cmdline
            option

    WebGPU specific:
        .wgpu.disable_bindgroups_cache
            When this is true, the WebGPU backend will create and immediately
            release a BindGroup object in the sg_apply_bindings() call, only
            use this for debugging purposes.
        .wgpu.bindgroups_cache_size
            The size of the bindgroups cache for re-using BindGroup objects
            between sg_apply_bindings() calls. The smaller the cache size,
            the more likely are cache slot collisions which will cause
            a BindGroup object to be destroyed and a new one created.
            Use the information returned by sg_query_stats() to check
            if this is a frequent occurrence, and increase the cache size as
            needed (the default is 1024).
            NOTE: .wgpu.bindgroups_cache_size must be a power-of-2 number!
        .environment.wgpu.device
            a WGPUDevice handle

    Vulkan specific:
        .vulkan.copy_staging_buffer_size
            Size of the staging buffer in bytes for uploading the initial
            content of buffers and images, and for updating
            .usage.dynamic_update resources. The default is 4 MB,
            bigger resource updates are split into multiple chunks
            of the staging buffer size.
        .vulkan.stream_staging_buffer_size
            Size of the staging buffer in bytes for updating .usage.stream_update
            resources. The default is 16 MB. The size must be big enough
            to accommodate all updates into .usage.stream_update resources.
            Any additional data will cause an error log message and
            incomplete rendering. Note that the actually allocated size
            will be twice as much because the stream-staging-buffer is
            double-buffered.
        .vulkan.descriptor_buffer_size
            Size of the descriptor-upload buffer in bytes. The default
            size is 16 MB. The size must be big enough to accommodate
            all uniform-block, view- and sampler-bindings in a single
            frame (assume a worst-case of 256 bytes per binding). Note
            that the actually allocated size will be twice as much
            because the descriptor-buffer is double-buffered.

    When using sokol_gfx.h and sokol_app.h together, consider using the
    helper function sglue_environment() in the sokol_glue.h header to
    initialize the sg_desc.environment nested struct. sglue_environment() returns
    a completely initialized sg_environment struct with information
    provided by sokol_app.h.
*/
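/*
    Example: a minimal sg_setup() call (a sketch only; it assumes that
    sokol_log.h and sokol_glue.h are also used, and the pool-size override
    is arbitrary - all zero-initialized members fall back to the defaults
    listed above):

        #include "sokol_gfx.h"
        #include "sokol_log.h"
        #include "sokol_glue.h"

        void init(void) {
            sg_setup(&(sg_desc){
                .buffer_pool_size = 256,            // override one default, keep the rest
                .logger.func = slog_func,           // standard logger from sokol_log.h
                .environment = sglue_environment(), // runtime objects provided by sokol_app.h glue
            });
        }
*/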
typedef struct sg_environment_defaults {
    sg_pixel_format color_format;
    sg_pixel_format depth_format;
    int sample_count;
} sg_environment_defaults;

typedef struct sg_metal_environment {
    const void* device;
} sg_metal_environment;

typedef struct sg_d3d11_environment {
    const void* device;
    const void* device_context;
} sg_d3d11_environment;

typedef struct sg_wgpu_environment {
    const void* device;
} sg_wgpu_environment;

typedef struct sg_vulkan_environment {
    const void* physical_device;
    const void* device;
    const void* queue;
    uint32_t queue_family_index;
} sg_vulkan_environment;

typedef struct sg_environment {
    sg_environment_defaults defaults;
    sg_metal_environment metal;
    sg_d3d11_environment d3d11;
    sg_wgpu_environment wgpu;
    sg_vulkan_environment vulkan;
} sg_environment;

/*
    sg_commit_listener

    Used with the function sg_add_commit_listener() to add a callback
    which will be called in sg_commit(). This is useful for libraries
    building on top of sokol-gfx to be notified when a frame
    ends (instead of having to guess, or add a manual 'new-frame'
    function).
*/
typedef struct sg_commit_listener {
    void (*func)(void* user_data);
    void* user_data;
} sg_commit_listener;
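/*
    Example: registering a commit listener (a sketch; the callback name
    and the zero user_data are made up for illustration):

        static void my_commit_hook(void* user_data) {
            // called from inside sg_commit() at the end of each frame
            (void)user_data;
        }

        sg_add_commit_listener((sg_commit_listener){
            .func = my_commit_hook,
            .user_data = 0,
        });
*/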
/*
    sg_allocator

    Used in sg_desc to provide custom memory-alloc and -free functions
    to sokol_gfx.h. If memory management should be overridden, both the
    alloc_fn and free_fn functions must be provided (e.g. it's not valid to
    override one function but not the other).
*/
typedef struct sg_allocator {
    void* (*alloc_fn)(size_t size, void* user_data);
    void (*free_fn)(void* ptr, void* user_data);
    void* user_data;
} sg_allocator;
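/*
    Example: plugging in a custom allocator (a sketch which simply forwards
    to malloc()/free(); the function names are made up for illustration):

        static void* my_alloc(size_t size, void* user_data) {
            (void)user_data;
            return malloc(size);
        }

        static void my_free(void* ptr, void* user_data) {
            (void)user_data;
            free(ptr);
        }

        sg_setup(&(sg_desc){
            .allocator = {
                .alloc_fn = my_alloc,
                .free_fn = my_free,
            },
            // ...
        });
*/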
/*
    sg_logger

    Used in sg_desc to provide a logging function. Please be aware
    that without a logging function, sokol-gfx will be completely
    silent, e.g. it will not report errors, warnings or
    validation layer messages. For maximum error verbosity,
    compile in debug mode (e.g. NDEBUG *not* defined) and provide a
    compatible logger function in the sg_setup() call
    (for instance the standard logging function from sokol_log.h).
*/
typedef struct sg_logger {
    void (*func)(
        const char* tag,                // always "sg"
        uint32_t log_level,             // 0=panic, 1=error, 2=warning, 3=info
        uint32_t log_item_id,           // SG_LOGITEM_*
        const char* message_or_null,    // a message string, may be nullptr in release mode
        uint32_t line_nr,               // line number in sokol_gfx.h
        const char* filename_or_null,   // source filename, may be nullptr in release mode
        void* user_data);
    void* user_data;
} sg_logger;
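/*
    Example: the easiest way to get log output is the standard logging
    function from sokol_log.h:

        sg_setup(&(sg_desc){
            .logger.func = slog_func,
            // ...
        });

    ...alternatively provide a custom function matching the signature
    above (a sketch; the function name and the forwarding to puts() are
    made up for illustration):

        static void my_log(const char* tag, uint32_t log_level,
                           uint32_t log_item_id, const char* message_or_null,
                           uint32_t line_nr, const char* filename_or_null,
                           void* user_data)
        {
            (void)tag; (void)log_level; (void)log_item_id;
            (void)line_nr; (void)filename_or_null; (void)user_data;
            if (message_or_null) {
                puts(message_or_null);
            }
        }
*/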
typedef struct sg_d3d11_desc {
    bool shader_debugging;  // if true, HLSL shaders are compiled with D3DCOMPILE_DEBUG | D3DCOMPILE_SKIP_OPTIMIZATION
} sg_d3d11_desc;

typedef struct sg_metal_desc {
    bool force_managed_storage_mode;    // for debugging: use Metal managed storage mode for resources even with UMA
    bool use_command_buffer_with_retained_references;   // Metal: use a managed MTLCommandBuffer which ref-counts used resources
} sg_metal_desc;

typedef struct sg_wgpu_desc {
    bool disable_bindgroups_cache;  // set to true to disable the WebGPU backend BindGroup cache
    int bindgroups_cache_size;      // number of slots in the WebGPU bindgroup cache (must be 2^N)
} sg_wgpu_desc;

typedef struct sg_vulkan_desc {
    int copy_staging_buffer_size;   // size of staging buffer for immutable and dynamic resources (default: 4 MB)
    int stream_staging_buffer_size; // size of per-frame staging buffer for updating streaming resources (default: 16 MB)
    int descriptor_buffer_size;     // size of per-frame descriptor buffer for updating resource bindings (default: 16 MB)
} sg_vulkan_desc;

typedef struct sg_desc {
    uint32_t _start_canary;
    int buffer_pool_size;
    int image_pool_size;
    int sampler_pool_size;
    int shader_pool_size;
    int pipeline_pool_size;
    int view_pool_size;
    int uniform_buffer_size;        // max size of all sg_apply_uniforms() calls per frame, with worst-case 256 byte alignment
    int max_commit_listeners;       // max number of commit listener hook functions
    bool disable_validation;        // disable validation layer even in debug mode, useful for tests
    bool enforce_portable_limits;   // if true, enforce portable resource binding limits (SG_MAX_PORTABLE_*)
    sg_d3d11_desc d3d11;            // d3d11-specific setup parameters
    sg_metal_desc metal;            // metal-specific setup parameters
    sg_wgpu_desc wgpu;              // webgpu-specific setup parameters
    sg_vulkan_desc vulkan;          // vulkan-specific setup parameters
    sg_allocator allocator;         // optional memory allocation hooks
    sg_logger logger;               // optional log function override
    sg_environment environment;     // required externally provided runtime objects and defaults
    uint32_t _end_canary;
} sg_desc;

// setup and misc functions
SOKOL_GFX_API_DECL void sg_setup(const sg_desc* desc);
SOKOL_GFX_API_DECL void sg_shutdown(void);
SOKOL_GFX_API_DECL bool sg_isvalid(void);
SOKOL_GFX_API_DECL void sg_reset_state_cache(void);
SOKOL_GFX_API_DECL sg_trace_hooks sg_install_trace_hooks(const sg_trace_hooks* trace_hooks);
SOKOL_GFX_API_DECL void sg_push_debug_group(const char* name);
SOKOL_GFX_API_DECL void sg_pop_debug_group(void);
SOKOL_GFX_API_DECL bool sg_add_commit_listener(sg_commit_listener listener);
SOKOL_GFX_API_DECL bool sg_remove_commit_listener(sg_commit_listener listener);

// resource creation, destruction and updating
SOKOL_GFX_API_DECL sg_buffer sg_make_buffer(const sg_buffer_desc* desc);
SOKOL_GFX_API_DECL sg_image sg_make_image(const sg_image_desc* desc);
SOKOL_GFX_API_DECL sg_sampler sg_make_sampler(const sg_sampler_desc* desc);
SOKOL_GFX_API_DECL sg_shader sg_make_shader(const sg_shader_desc* desc);
SOKOL_GFX_API_DECL sg_pipeline sg_make_pipeline(const sg_pipeline_desc* desc);
SOKOL_GFX_API_DECL sg_view sg_make_view(const sg_view_desc* desc);
SOKOL_GFX_API_DECL void sg_destroy_buffer(sg_buffer buf);
SOKOL_GFX_API_DECL void sg_destroy_image(sg_image img);
SOKOL_GFX_API_DECL void sg_destroy_sampler(sg_sampler smp);
SOKOL_GFX_API_DECL void sg_destroy_shader(sg_shader shd);
SOKOL_GFX_API_DECL void sg_destroy_pipeline(sg_pipeline pip);
SOKOL_GFX_API_DECL void sg_destroy_view(sg_view view);
SOKOL_GFX_API_DECL void sg_update_buffer(sg_buffer buf, const sg_range* data);
SOKOL_GFX_API_DECL void sg_update_image(sg_image img, const sg_image_data* data);
SOKOL_GFX_API_DECL int sg_append_buffer(sg_buffer buf, const sg_range* data);
SOKOL_GFX_API_DECL bool sg_query_buffer_overflow(sg_buffer buf);
SOKOL_GFX_API_DECL bool sg_query_buffer_will_overflow(sg_buffer buf, size_t size);

// render and compute functions
SOKOL_GFX_API_DECL void sg_begin_pass(const sg_pass* pass);
SOKOL_GFX_API_DECL void sg_apply_viewport(int x, int y, int width, int height, bool origin_top_left);
SOKOL_GFX_API_DECL void sg_apply_viewportf(float x, float y, float width, float height, bool origin_top_left);
SOKOL_GFX_API_DECL void sg_apply_scissor_rect(int x, int y, int width, int height, bool origin_top_left);
SOKOL_GFX_API_DECL void sg_apply_scissor_rectf(float x, float y, float width, float height, bool origin_top_left);
SOKOL_GFX_API_DECL void sg_apply_pipeline(sg_pipeline pip);
SOKOL_GFX_API_DECL void sg_apply_bindings(const sg_bindings* bindings);
SOKOL_GFX_API_DECL void sg_apply_uniforms(int ub_slot, const sg_range* data);
SOKOL_GFX_API_DECL void sg_draw(int base_element, int num_elements, int num_instances);
SOKOL_GFX_API_DECL void sg_draw_ex(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance);
SOKOL_GFX_API_DECL void sg_dispatch(int num_groups_x, int num_groups_y, int num_groups_z);
SOKOL_GFX_API_DECL void sg_end_pass(void);
SOKOL_GFX_API_DECL void sg_commit(void);
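/*
    Example: the typical call order for one render frame (a sketch; 'pip'
    and 'bind' are assumed to be a previously created pipeline and a
    filled sg_bindings struct, and the swapchain struct is assumed to come
    from the sokol_glue.h helper sglue_swapchain()):

        sg_begin_pass(&(sg_pass){ .swapchain = sglue_swapchain() });
        sg_apply_pipeline(pip);
        sg_apply_bindings(&bind);
        sg_draw(0, 3, 1);   // base_element=0, num_elements=3, num_instances=1
        sg_end_pass();
        sg_commit();
*/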
// getting information
SOKOL_GFX_API_DECL sg_desc sg_query_desc(void);
SOKOL_GFX_API_DECL sg_backend sg_query_backend(void);
SOKOL_GFX_API_DECL sg_features sg_query_features(void);
SOKOL_GFX_API_DECL sg_limits sg_query_limits(void);
SOKOL_GFX_API_DECL sg_pixelformat_info sg_query_pixelformat(sg_pixel_format fmt);
SOKOL_GFX_API_DECL int sg_query_row_pitch(sg_pixel_format fmt, int width, int row_align_bytes);
SOKOL_GFX_API_DECL int sg_query_surface_pitch(sg_pixel_format fmt, int width, int height, int row_align_bytes);

// get current state of a resource (INITIAL, ALLOC, VALID, FAILED, INVALID)
SOKOL_GFX_API_DECL sg_resource_state sg_query_buffer_state(sg_buffer buf);
SOKOL_GFX_API_DECL sg_resource_state sg_query_image_state(sg_image img);
SOKOL_GFX_API_DECL sg_resource_state sg_query_sampler_state(sg_sampler smp);
SOKOL_GFX_API_DECL sg_resource_state sg_query_shader_state(sg_shader shd);
SOKOL_GFX_API_DECL sg_resource_state sg_query_pipeline_state(sg_pipeline pip);
SOKOL_GFX_API_DECL sg_resource_state sg_query_view_state(sg_view view);

// get runtime information about a resource
SOKOL_GFX_API_DECL sg_buffer_info sg_query_buffer_info(sg_buffer buf);
SOKOL_GFX_API_DECL sg_image_info sg_query_image_info(sg_image img);
SOKOL_GFX_API_DECL sg_sampler_info sg_query_sampler_info(sg_sampler smp);
SOKOL_GFX_API_DECL sg_shader_info sg_query_shader_info(sg_shader shd);
SOKOL_GFX_API_DECL sg_pipeline_info sg_query_pipeline_info(sg_pipeline pip);
SOKOL_GFX_API_DECL sg_view_info sg_query_view_info(sg_view view);

// get desc structs matching a specific resource (NOTE that not all creation attributes may be provided)
SOKOL_GFX_API_DECL sg_buffer_desc sg_query_buffer_desc(sg_buffer buf);
SOKOL_GFX_API_DECL sg_image_desc sg_query_image_desc(sg_image img);
SOKOL_GFX_API_DECL sg_sampler_desc sg_query_sampler_desc(sg_sampler smp);
SOKOL_GFX_API_DECL sg_shader_desc sg_query_shader_desc(sg_shader shd);
SOKOL_GFX_API_DECL sg_pipeline_desc sg_query_pipeline_desc(sg_pipeline pip);
SOKOL_GFX_API_DECL sg_view_desc sg_query_view_desc(sg_view view);

// get resource creation desc structs with their default values replaced
SOKOL_GFX_API_DECL sg_buffer_desc sg_query_buffer_defaults(const sg_buffer_desc* desc);
SOKOL_GFX_API_DECL sg_image_desc sg_query_image_defaults(const sg_image_desc* desc);
SOKOL_GFX_API_DECL sg_sampler_desc sg_query_sampler_defaults(const sg_sampler_desc* desc);
SOKOL_GFX_API_DECL sg_shader_desc sg_query_shader_defaults(const sg_shader_desc* desc);
SOKOL_GFX_API_DECL sg_pipeline_desc sg_query_pipeline_defaults(const sg_pipeline_desc* desc);
SOKOL_GFX_API_DECL sg_view_desc sg_query_view_defaults(const sg_view_desc* desc);

// assorted query functions
SOKOL_GFX_API_DECL size_t sg_query_buffer_size(sg_buffer buf);
SOKOL_GFX_API_DECL sg_buffer_usage sg_query_buffer_usage(sg_buffer buf);
SOKOL_GFX_API_DECL sg_image_type sg_query_image_type(sg_image img);
SOKOL_GFX_API_DECL int sg_query_image_width(sg_image img);
SOKOL_GFX_API_DECL int sg_query_image_height(sg_image img);
SOKOL_GFX_API_DECL int sg_query_image_num_slices(sg_image img);
SOKOL_GFX_API_DECL int sg_query_image_num_mipmaps(sg_image img);
SOKOL_GFX_API_DECL sg_pixel_format sg_query_image_pixelformat(sg_image img);
SOKOL_GFX_API_DECL sg_image_usage sg_query_image_usage(sg_image img);
SOKOL_GFX_API_DECL int sg_query_image_sample_count(sg_image img);
SOKOL_GFX_API_DECL sg_view_type sg_query_view_type(sg_view view);
SOKOL_GFX_API_DECL sg_image sg_query_view_image(sg_view view);
SOKOL_GFX_API_DECL sg_buffer sg_query_view_buffer(sg_view view);

// separate resource allocation and initialization (for async setup)
SOKOL_GFX_API_DECL sg_buffer sg_alloc_buffer(void);
SOKOL_GFX_API_DECL sg_image sg_alloc_image(void);
SOKOL_GFX_API_DECL sg_sampler sg_alloc_sampler(void);
SOKOL_GFX_API_DECL sg_shader sg_alloc_shader(void);
SOKOL_GFX_API_DECL sg_pipeline sg_alloc_pipeline(void);
SOKOL_GFX_API_DECL sg_view sg_alloc_view(void);
SOKOL_GFX_API_DECL void sg_dealloc_buffer(sg_buffer buf);
SOKOL_GFX_API_DECL void sg_dealloc_image(sg_image img);
SOKOL_GFX_API_DECL void sg_dealloc_sampler(sg_sampler smp);
SOKOL_GFX_API_DECL void sg_dealloc_shader(sg_shader shd);
SOKOL_GFX_API_DECL void sg_dealloc_pipeline(sg_pipeline pip);
SOKOL_GFX_API_DECL void sg_dealloc_view(sg_view view);
SOKOL_GFX_API_DECL void sg_init_buffer(sg_buffer buf, const sg_buffer_desc* desc);
SOKOL_GFX_API_DECL void sg_init_image(sg_image img, const sg_image_desc* desc);
SOKOL_GFX_API_DECL void sg_init_sampler(sg_sampler smp, const sg_sampler_desc* desc);
SOKOL_GFX_API_DECL void sg_init_shader(sg_shader shd, const sg_shader_desc* desc);
SOKOL_GFX_API_DECL void sg_init_pipeline(sg_pipeline pip, const sg_pipeline_desc* desc);
SOKOL_GFX_API_DECL void sg_init_view(sg_view view, const sg_view_desc* desc);
SOKOL_GFX_API_DECL void sg_uninit_buffer(sg_buffer buf);
SOKOL_GFX_API_DECL void sg_uninit_image(sg_image img);
SOKOL_GFX_API_DECL void sg_uninit_sampler(sg_sampler smp);
SOKOL_GFX_API_DECL void sg_uninit_shader(sg_shader shd);
SOKOL_GFX_API_DECL void sg_uninit_pipeline(sg_pipeline pip);
SOKOL_GFX_API_DECL void sg_uninit_view(sg_view view);
SOKOL_GFX_API_DECL void sg_fail_buffer(sg_buffer buf);
SOKOL_GFX_API_DECL void sg_fail_image(sg_image img);
SOKOL_GFX_API_DECL void sg_fail_sampler(sg_sampler smp);
SOKOL_GFX_API_DECL void sg_fail_shader(sg_shader shd);
SOKOL_GFX_API_DECL void sg_fail_pipeline(sg_pipeline pip);
SOKOL_GFX_API_DECL void sg_fail_view(sg_view view);
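/*
    Example: two-step resource setup for async loading (a sketch; the desc
    content and the 'pixels' array are made up for illustration):

        // allocate a handle early, before the data is available...
        sg_image img = sg_alloc_image();

        // ...later, when the pixel data has arrived, initialize the image:
        sg_init_image(img, &(sg_image_desc){
            .width = 256,
            .height = 256,
            .data.subimage[0][0] = SG_RANGE(pixels),
        });

        // ...or, if loading failed, put the resource into the FAILED state:
        sg_fail_image(img);
*/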
// frame and total stats
SOKOL_GFX_API_DECL void sg_enable_stats(void);
SOKOL_GFX_API_DECL void sg_disable_stats(void);
SOKOL_GFX_API_DECL bool sg_stats_enabled(void);
SOKOL_GFX_API_DECL sg_stats sg_query_stats(void);

/* Backend-specific structs and functions, these may come in handy for mixing
   sokol-gfx rendering with 'native backend' rendering functions.

   This group of functions will be expanded as needed.
*/
typedef struct sg_d3d11_buffer_info {
    const void* buf;    // ID3D11Buffer*
} sg_d3d11_buffer_info;
typedef struct sg_d3d11_image_info {
    const void* tex2d;  // ID3D11Texture2D*
    const void* tex3d;  // ID3D11Texture3D*
    const void* res;    // ID3D11Resource* (either tex2d or tex3d)
} sg_d3d11_image_info;
typedef struct sg_d3d11_sampler_info {
    const void* smp;    // ID3D11SamplerState*
} sg_d3d11_sampler_info;
typedef struct sg_d3d11_shader_info {
    const void* cbufs[SG_MAX_UNIFORMBLOCK_BINDSLOTS];   // ID3D11Buffer* (constant buffers by bind slot)
    const void* vs;     // ID3D11VertexShader*
    const void* fs;     // ID3D11PixelShader*
} sg_d3d11_shader_info;
typedef struct sg_d3d11_pipeline_info {
    const void* il;     // ID3D11InputLayout*
    const void* rs;     // ID3D11RasterizerState*
    const void* dss;    // ID3D11DepthStencilState*
    const void* bs;     // ID3D11BlendState*
} sg_d3d11_pipeline_info;
typedef struct sg_d3d11_view_info {
    const void* srv;    // ID3D11ShaderResourceView*
    const void* uav;    // ID3D11UnorderedAccessView*
    const void* rtv;    // ID3D11RenderTargetView*
    const void* dsv;    // ID3D11DepthStencilView*
} sg_d3d11_view_info;
typedef struct sg_mtl_buffer_info {
    const void* buf[SG_NUM_INFLIGHT_FRAMES];    // id<MTLBuffer>
    int active_slot;
} sg_mtl_buffer_info;
typedef struct sg_mtl_image_info {
    const void* tex[SG_NUM_INFLIGHT_FRAMES];    // id<MTLTexture>
    int active_slot;
} sg_mtl_image_info;
typedef struct sg_mtl_sampler_info {
    const void* smp;    // id<MTLSamplerState>
} sg_mtl_sampler_info;
typedef struct sg_mtl_shader_info {
    const void* vertex_lib;     // id<MTLLibrary>
    const void* fragment_lib;   // id<MTLLibrary>
    const void* vertex_func;    // id<MTLFunction>
    const void* fragment_func;  // id<MTLFunction>
} sg_mtl_shader_info;
typedef struct sg_mtl_pipeline_info {
    const void* rps;    // id<MTLRenderPipelineState>
    const void* dss;    // id<MTLDepthStencilState>
} sg_mtl_pipeline_info;
typedef struct sg_wgpu_buffer_info {
    const void* buf;    // WGPUBuffer
} sg_wgpu_buffer_info;
typedef struct sg_wgpu_image_info {
    const void* tex;    // WGPUTexture
} sg_wgpu_image_info;
typedef struct sg_wgpu_sampler_info {
    const void* smp;    // WGPUSampler
} sg_wgpu_sampler_info;
typedef struct sg_wgpu_shader_info {
    const void* vs_mod; // WGPUShaderModule
    const void* fs_mod; // WGPUShaderModule
    const void* bgl;    // WGPUBindGroupLayout
} sg_wgpu_shader_info;
typedef struct sg_wgpu_pipeline_info {
    const void* render_pipeline;    // WGPURenderPipeline
    const void* compute_pipeline;   // WGPUComputePipeline
} sg_wgpu_pipeline_info;
typedef struct sg_wgpu_view_info {
    const void* view;   // WGPUTextureView
} sg_wgpu_view_info;
typedef struct sg_gl_buffer_info {
    uint32_t buf[SG_NUM_INFLIGHT_FRAMES];
    int active_slot;
} sg_gl_buffer_info;
typedef struct sg_gl_image_info {
    uint32_t tex[SG_NUM_INFLIGHT_FRAMES];
    uint32_t tex_target;
    int active_slot;
} sg_gl_image_info;
typedef struct sg_gl_sampler_info {
    uint32_t smp;
} sg_gl_sampler_info;
typedef struct sg_gl_shader_info {
    uint32_t prog;
} sg_gl_shader_info;
typedef struct sg_gl_view_info {
    uint32_t tex_view[SG_NUM_INFLIGHT_FRAMES];
    uint32_t msaa_render_buffer;
    uint32_t msaa_resolve_frame_buffer;
} sg_gl_view_info;

// D3D11: return ID3D11Device
SOKOL_GFX_API_DECL const void* sg_d3d11_device(void);
// D3D11: return ID3D11DeviceContext
SOKOL_GFX_API_DECL const void* sg_d3d11_device_context(void);
// D3D11: get internal buffer resource objects
SOKOL_GFX_API_DECL sg_d3d11_buffer_info sg_d3d11_query_buffer_info(sg_buffer buf);
// D3D11: get internal image resource objects
SOKOL_GFX_API_DECL sg_d3d11_image_info sg_d3d11_query_image_info(sg_image img);
// D3D11: get internal sampler resource objects
SOKOL_GFX_API_DECL sg_d3d11_sampler_info sg_d3d11_query_sampler_info(sg_sampler smp);
// D3D11: get internal shader resource objects
SOKOL_GFX_API_DECL sg_d3d11_shader_info sg_d3d11_query_shader_info(sg_shader shd);
// D3D11: get internal pipeline resource objects
SOKOL_GFX_API_DECL sg_d3d11_pipeline_info sg_d3d11_query_pipeline_info(sg_pipeline pip);
// D3D11: get internal view resource objects
SOKOL_GFX_API_DECL sg_d3d11_view_info sg_d3d11_query_view_info(sg_view view);
// Metal: return __bridge-casted MTLDevice
SOKOL_GFX_API_DECL const void* sg_mtl_device(void);
// Metal: return __bridge-casted MTLRenderCommandEncoder when inside render pass (otherwise zero)
SOKOL_GFX_API_DECL const void* sg_mtl_render_command_encoder(void);
// Metal: return __bridge-casted MTLComputeCommandEncoder when inside compute pass (otherwise zero)
SOKOL_GFX_API_DECL const void* sg_mtl_compute_command_encoder(void);
// Metal: get internal __bridge-casted buffer resource objects
SOKOL_GFX_API_DECL sg_mtl_buffer_info sg_mtl_query_buffer_info(sg_buffer buf);
// Metal: get internal __bridge-casted image resource objects
SOKOL_GFX_API_DECL sg_mtl_image_info sg_mtl_query_image_info(sg_image img);
// Metal: get internal __bridge-casted sampler resource objects
SOKOL_GFX_API_DECL sg_mtl_sampler_info sg_mtl_query_sampler_info(sg_sampler smp);
// Metal: get internal __bridge-casted shader resource objects
SOKOL_GFX_API_DECL sg_mtl_shader_info sg_mtl_query_shader_info(sg_shader shd);
// Metal: get internal __bridge-casted pipeline resource objects
SOKOL_GFX_API_DECL sg_mtl_pipeline_info sg_mtl_query_pipeline_info(sg_pipeline pip);
// WebGPU: return WGPUDevice object
SOKOL_GFX_API_DECL const void* sg_wgpu_device(void);
// WebGPU: return WGPUQueue object
SOKOL_GFX_API_DECL const void* sg_wgpu_queue(void);
// WebGPU: return this frame's WGPUCommandEncoder
SOKOL_GFX_API_DECL const void* sg_wgpu_command_encoder(void);
// WebGPU: return WGPURenderPassEncoder of current pass (returns 0 when outside pass or in a compute pass)
SOKOL_GFX_API_DECL const void* sg_wgpu_render_pass_encoder(void);
// WebGPU: return WGPUComputePassEncoder of current pass (returns 0 when outside pass or in a render pass)
SOKOL_GFX_API_DECL const void* sg_wgpu_compute_pass_encoder(void);
// WebGPU: get internal buffer resource objects
SOKOL_GFX_API_DECL sg_wgpu_buffer_info sg_wgpu_query_buffer_info(sg_buffer buf);
// WebGPU: get internal image resource objects
SOKOL_GFX_API_DECL sg_wgpu_image_info sg_wgpu_query_image_info(sg_image img);
// WebGPU: get internal sampler resource objects
SOKOL_GFX_API_DECL sg_wgpu_sampler_info sg_wgpu_query_sampler_info(sg_sampler smp);
// WebGPU: get internal shader resource objects
SOKOL_GFX_API_DECL sg_wgpu_shader_info sg_wgpu_query_shader_info(sg_shader shd);
// WebGPU: get internal pipeline resource objects
SOKOL_GFX_API_DECL sg_wgpu_pipeline_info sg_wgpu_query_pipeline_info(sg_pipeline pip);
// WebGPU: get internal view resource objects
SOKOL_GFX_API_DECL sg_wgpu_view_info sg_wgpu_query_view_info(sg_view view);
// GL: get internal buffer resource objects
SOKOL_GFX_API_DECL sg_gl_buffer_info sg_gl_query_buffer_info(sg_buffer buf);
// GL: get internal image resource objects
SOKOL_GFX_API_DECL sg_gl_image_info sg_gl_query_image_info(sg_image img);
// GL: get internal sampler resource objects
SOKOL_GFX_API_DECL sg_gl_sampler_info sg_gl_query_sampler_info(sg_sampler smp);
// GL: get internal shader resource objects
SOKOL_GFX_API_DECL sg_gl_shader_info sg_gl_query_shader_info(sg_shader shd);
// GL: get internal view resource objects
SOKOL_GFX_API_DECL sg_gl_view_info sg_gl_query_view_info(sg_view view);
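/*
    Example: pulling the native GL buffer object out of sokol-gfx for
    mixing with direct GL calls (a sketch; 'buf' is an assumed sg_buffer
    handle):

        sg_gl_buffer_info info = sg_gl_query_buffer_info(buf);
        uint32_t gl_buf = info.buf[info.active_slot];   // GL buffer 'name' of the active slot
        // ...use gl_buf with native GL functions...
*/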
  4558. #ifdef __cplusplus
  4559. } // extern "C"
// reference-based equivalents for C++
inline void sg_setup(const sg_desc& desc) { return sg_setup(&desc); }
inline sg_buffer sg_make_buffer(const sg_buffer_desc& desc) { return sg_make_buffer(&desc); }
inline sg_image sg_make_image(const sg_image_desc& desc) { return sg_make_image(&desc); }
inline sg_sampler sg_make_sampler(const sg_sampler_desc& desc) { return sg_make_sampler(&desc); }
inline sg_shader sg_make_shader(const sg_shader_desc& desc) { return sg_make_shader(&desc); }
inline sg_pipeline sg_make_pipeline(const sg_pipeline_desc& desc) { return sg_make_pipeline(&desc); }
inline sg_view sg_make_view(const sg_view_desc& desc) { return sg_make_view(&desc); }
inline void sg_update_image(sg_image img, const sg_image_data& data) { return sg_update_image(img, &data); }
inline void sg_begin_pass(const sg_pass& pass) { return sg_begin_pass(&pass); }
inline void sg_apply_bindings(const sg_bindings& bindings) { return sg_apply_bindings(&bindings); }
inline void sg_apply_uniforms(int ub_slot, const sg_range& data) { return sg_apply_uniforms(ub_slot, &data); }
inline sg_buffer_desc sg_query_buffer_defaults(const sg_buffer_desc& desc) { return sg_query_buffer_defaults(&desc); }
inline sg_image_desc sg_query_image_defaults(const sg_image_desc& desc) { return sg_query_image_defaults(&desc); }
inline sg_sampler_desc sg_query_sampler_defaults(const sg_sampler_desc& desc) { return sg_query_sampler_defaults(&desc); }
inline sg_shader_desc sg_query_shader_defaults(const sg_shader_desc& desc) { return sg_query_shader_defaults(&desc); }
inline sg_pipeline_desc sg_query_pipeline_defaults(const sg_pipeline_desc& desc) { return sg_query_pipeline_defaults(&desc); }
inline sg_view_desc sg_query_view_defaults(const sg_view_desc& desc) { return sg_query_view_defaults(&desc); }
inline void sg_init_buffer(sg_buffer buf, const sg_buffer_desc& desc) { return sg_init_buffer(buf, &desc); }
inline void sg_init_image(sg_image img, const sg_image_desc& desc) { return sg_init_image(img, &desc); }
inline void sg_init_sampler(sg_sampler smp, const sg_sampler_desc& desc) { return sg_init_sampler(smp, &desc); }
inline void sg_init_shader(sg_shader shd, const sg_shader_desc& desc) { return sg_init_shader(shd, &desc); }
inline void sg_init_pipeline(sg_pipeline pip, const sg_pipeline_desc& desc) { return sg_init_pipeline(pip, &desc); }
inline void sg_init_view(sg_view view, const sg_view_desc& desc) { return sg_init_view(view, &desc); }
inline void sg_update_buffer(sg_buffer buf_id, const sg_range& data) { return sg_update_buffer(buf_id, &data); }
inline int sg_append_buffer(sg_buffer buf_id, const sg_range& data) { return sg_append_buffer(buf_id, &data); }
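// Example (illustrative sketch, not part of the API): the reference overloads
// above allow passing a temporary desc struct directly, e.g. with C++20
// designated initializers ('vertices' is a hypothetical array):
//
//   sg_buffer buf = sg_make_buffer(sg_buffer_desc{
//       .data = SG_RANGE(vertices),
//   });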
#endif
#endif // SOKOL_GFX_INCLUDED
// ██ ███ ███ ██████ ██ ███████ ███ ███ ███████ ███ ██ ████████ █████ ████████ ██ ██████ ███ ██
// ██ ████ ████ ██ ██ ██ ██ ████ ████ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██
// ██ ██ ████ ██ ██████ ██ █████ ██ ████ ██ █████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ██ ██ ██ ██ ███████ ███████ ██ ██ ███████ ██ ████ ██ ██ ██ ██ ██ ██████ ██ ████
//
// >>implementation
#ifdef SOKOL_GFX_IMPL
#define SOKOL_GFX_IMPL_INCLUDED (1)
#if !(defined(SOKOL_GLCORE)||defined(SOKOL_GLES3)||defined(SOKOL_D3D11)||defined(SOKOL_METAL)||defined(SOKOL_WGPU)||defined(SOKOL_VULKAN)||defined(SOKOL_DUMMY_BACKEND))
#error "Please select a backend with SOKOL_GLCORE, SOKOL_GLES3, SOKOL_D3D11, SOKOL_METAL, SOKOL_WGPU, SOKOL_VULKAN or SOKOL_DUMMY_BACKEND"
#endif
#if defined(SOKOL_MALLOC) || defined(SOKOL_CALLOC) || defined(SOKOL_FREE)
#error "SOKOL_MALLOC/CALLOC/FREE macros are no longer supported, please use sg_desc.allocator to override memory allocation functions"
#endif
#include <stdlib.h> // malloc, free, qsort
#include <string.h> // memset
#include <float.h> // FLT_MAX
#ifndef SOKOL_API_IMPL
#define SOKOL_API_IMPL
#endif
#ifndef SOKOL_DEBUG
#ifndef NDEBUG
#define SOKOL_DEBUG
#endif
#endif
#ifndef SOKOL_ASSERT
#include <assert.h>
#define SOKOL_ASSERT(c) assert(c)
#endif
#ifndef SOKOL_UNREACHABLE
#define SOKOL_UNREACHABLE SOKOL_ASSERT(false)
#endif
#ifndef _SOKOL_PRIVATE
#if defined(__GNUC__) || defined(__clang__)
#define _SOKOL_PRIVATE __attribute__((unused)) static
#else
#define _SOKOL_PRIVATE static
#endif
#endif
#ifndef _SOKOL_UNUSED
#define _SOKOL_UNUSED(x) (void)(x)
#endif
#if defined(SOKOL_TRACE_HOOKS)
#define _SG_TRACE_ARGS(fn, ...) if (_sg.hooks.fn) { _sg.hooks.fn(__VA_ARGS__, _sg.hooks.user_data); }
#define _SG_TRACE_NOARGS(fn) if (_sg.hooks.fn) { _sg.hooks.fn(_sg.hooks.user_data); }
#else
#define _SG_TRACE_ARGS(fn, ...)
#define _SG_TRACE_NOARGS(fn)
#endif
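// Example (illustrative sketch): with SOKOL_TRACE_HOOKS defined, trace
// callbacks are installed via sg_install_trace_hooks(); 'my_draw_hook' is a
// hypothetical callback used here to log draw calls:
//
//   static void my_draw_hook(int base_element, int num_elements, int num_instances, void* user_data) {
//       // ...log the draw call...
//   }
//   sg_trace_hooks hooks = {0};
//   hooks.draw = my_draw_hook;
//   sg_trace_hooks old_hooks = sg_install_trace_hooks(&hooks);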
#ifdef __cplusplus
#define _SG_STRUCT(TYPE, NAME) TYPE NAME = {}
#else
#define _SG_STRUCT(TYPE, NAME) TYPE NAME = {0}
#endif
// default clear values
#ifndef SG_DEFAULT_CLEAR_RED
#define SG_DEFAULT_CLEAR_RED (0.5f)
#endif
#ifndef SG_DEFAULT_CLEAR_GREEN
#define SG_DEFAULT_CLEAR_GREEN (0.5f)
#endif
#ifndef SG_DEFAULT_CLEAR_BLUE
#define SG_DEFAULT_CLEAR_BLUE (0.5f)
#endif
#ifndef SG_DEFAULT_CLEAR_ALPHA
#define SG_DEFAULT_CLEAR_ALPHA (1.0f)
#endif
#ifndef SG_DEFAULT_CLEAR_DEPTH
#define SG_DEFAULT_CLEAR_DEPTH (1.0f)
#endif
#ifndef SG_DEFAULT_CLEAR_STENCIL
#define SG_DEFAULT_CLEAR_STENCIL (0)
#endif
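// NOTE (sketch): since the clear-value defaults above are wrapped in #ifndef,
// they can be overridden before including the implementation, e.g.:
//
//   #define SG_DEFAULT_CLEAR_RED (0.0f)
//   #define SOKOL_GFX_IMPL
//   #include "sokol_gfx.h"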
#ifdef _MSC_VER
#pragma warning(push)
#pragma warning(disable:4115) // named type definition in parentheses
#pragma warning(disable:4505) // unreferenced local function has been removed
#pragma warning(disable:4201) // nonstandard extension used: nameless struct/union (needed by d3d11.h)
#pragma warning(disable:4054) // 'type cast': from function pointer
#pragma warning(disable:4055) // 'type cast': from data pointer
#endif
#if defined(SOKOL_D3D11)
#if defined(__GNUC__)
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wunknown-pragmas"
#endif
#ifndef D3D11_NO_HELPERS
#define D3D11_NO_HELPERS
#endif
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <d3d11.h>
#include <d3dcompiler.h>
#pragma comment (lib, "kernel32")
#pragma comment (lib, "user32")
#pragma comment (lib, "dxgi")
#pragma comment (lib, "d3d11")
#if defined(__GNUC__)
#pragma GCC diagnostic pop
#endif
#elif defined(SOKOL_METAL)
// see https://clang.llvm.org/docs/LanguageExtensions.html#automatic-reference-counting
#if !defined(__cplusplus)
#if __has_feature(objc_arc) && !__has_feature(objc_arc_fields)
#error "sokol_gfx.h requires __has_feature(objc_arc_fields) if ARC is enabled (use a more recent compiler version)"
#endif
#endif
#include <TargetConditionals.h>
#include <AvailabilityMacros.h>
#if defined(TARGET_OS_IPHONE) && !TARGET_OS_IPHONE
#define _SG_TARGET_MACOS (1)
#else
#define _SG_TARGET_IOS (1)
#if defined(TARGET_IPHONE_SIMULATOR) && TARGET_IPHONE_SIMULATOR
#define _SG_TARGET_IOS_SIMULATOR (1)
#endif
#endif
#import <Metal/Metal.h>
#import <QuartzCore/CoreAnimation.h> // needed for CAMetalDrawable
#elif defined(SOKOL_WGPU)
#include <webgpu/webgpu.h>
#if defined(__EMSCRIPTEN__)
#include <emscripten/emscripten.h>
#endif
#elif defined(SOKOL_VULKAN)
#include <vulkan/vulkan.h>
#elif defined(SOKOL_GLCORE) || defined(SOKOL_GLES3)
#define _SOKOL_ANY_GL (1)
// include platform-specific GL headers (or on Win32: use an embedded GL loader)
#if !defined(SOKOL_EXTERNAL_GL_LOADER)
#if defined(_WIN32)
#if defined(SOKOL_GLCORE)
#define _SOKOL_USE_WIN32_GL_LOADER (1)
#ifndef WIN32_LEAN_AND_MEAN
#define WIN32_LEAN_AND_MEAN
#endif
#ifndef NOMINMAX
#define NOMINMAX
#endif
#include <windows.h>
#pragma comment (lib, "kernel32") // GetProcAddress()
#endif
#elif defined(__APPLE__)
#include <TargetConditionals.h>
#ifndef GL_SILENCE_DEPRECATION
#define GL_SILENCE_DEPRECATION
#endif
#if defined(TARGET_OS_IPHONE) && !TARGET_OS_IPHONE
#include <OpenGL/gl3.h>
#else
#include <OpenGLES/ES3/gl.h>
#include <OpenGLES/ES3/glext.h>
#endif
#elif defined(__EMSCRIPTEN__)
#if defined(SOKOL_GLES3)
#include <GLES3/gl3.h>
#endif
#elif defined(__ANDROID__)
#include <GLES3/gl31.h>
#elif defined(__linux__) || defined(__unix__)
#if defined(SOKOL_GLCORE)
#define GL_GLEXT_PROTOTYPES
#include <GL/gl.h>
#else
#include <GLES3/gl32.h>
#include <GLES3/gl3ext.h>
#endif
#endif
#endif
// broad GL feature availability defines (DON'T merge this into the above ifdef-block!)
#if defined(_WIN32)
#if defined(GL_VERSION_4_3) || defined(_SOKOL_USE_WIN32_GL_LOADER)
#define _SOKOL_GL_HAS_COMPUTE (1)
#define _SOKOL_GL_HAS_TEXVIEWS (1)
#endif
#if defined(GL_VERSION_4_2) || defined(_SOKOL_USE_WIN32_GL_LOADER)
#define _SOKOL_GL_HAS_TEXSTORAGE (1)
#define _SOKOL_GL_HAS_BASEINSTANCE (1)
#endif
#if defined(GL_VERSION_3_2) || defined(_SOKOL_USE_WIN32_GL_LOADER)
#define _SOKOL_GL_HAS_BASEVERTEX (1)
#endif
#elif defined(__APPLE__)
#if defined(TARGET_OS_IPHONE) && !TARGET_OS_IPHONE
#define _SOKOL_GL_HAS_BASEVERTEX (1)
#else
#define _SOKOL_GL_HAS_TEXSTORAGE (1)
#endif
#elif defined(__EMSCRIPTEN__)
#define _SOKOL_GL_HAS_TEXSTORAGE (1)
#elif defined(__ANDROID__)
#define _SOKOL_GL_HAS_COMPUTE (1)
#define _SOKOL_GL_HAS_TEXSTORAGE (1)
#elif defined(__linux__) || defined(__unix__)
#if defined(SOKOL_GLCORE)
#if defined(GL_VERSION_4_3)
#define _SOKOL_GL_HAS_COMPUTE (1)
#define _SOKOL_GL_HAS_TEXVIEWS (1)
#endif
#if defined(GL_VERSION_4_2)
#define _SOKOL_GL_HAS_TEXSTORAGE (1)
#define _SOKOL_GL_HAS_BASEINSTANCE (1)
#endif
#if defined(GL_VERSION_3_2)
#define _SOKOL_GL_HAS_BASEVERTEX (1)
#endif
#else
#define _SOKOL_GL_HAS_COMPUTE (1)
#define _SOKOL_GL_HAS_TEXSTORAGE (1)
#define _SOKOL_GL_HAS_BASEVERTEX (1)
#endif
#endif
// optional GL loader definitions (only on Win32)
#if defined(_SOKOL_USE_WIN32_GL_LOADER)
#define __gl_h_ 1
#define __gl32_h_ 1
#define __gl31_h_ 1
#define __GL_H__ 1
#define __glext_h_ 1
#define __GLEXT_H_ 1
#define __gltypes_h_ 1
#define __glcorearb_h_ 1
#define __gl_glcorearb_h_ 1
#define GL_APIENTRY APIENTRY
typedef unsigned int GLenum;
typedef unsigned int GLuint;
typedef int GLsizei;
typedef char GLchar;
typedef ptrdiff_t GLintptr;
typedef ptrdiff_t GLsizeiptr;
typedef double GLclampd;
typedef unsigned short GLushort;
typedef unsigned char GLubyte;
typedef unsigned char GLboolean;
typedef uint64_t GLuint64;
typedef double GLdouble;
typedef unsigned short GLhalf;
typedef float GLclampf;
typedef unsigned int GLbitfield;
typedef signed char GLbyte;
typedef short GLshort;
typedef void GLvoid;
typedef int64_t GLint64;
typedef float GLfloat;
typedef int GLint;
#define GL_INT_2_10_10_10_REV 0x8D9F
#define GL_R32F 0x822E
#define GL_PROGRAM_POINT_SIZE 0x8642
#define GL_DEPTH_ATTACHMENT 0x8D00
#define GL_DEPTH_STENCIL_ATTACHMENT 0x821A
#define GL_COLOR_ATTACHMENT0 0x8CE0
#define GL_R16F 0x822D
#define GL_DRAW_FRAMEBUFFER 0x8CA9
#define GL_FRAMEBUFFER_COMPLETE 0x8CD5
#define GL_NUM_EXTENSIONS 0x821D
#define GL_INFO_LOG_LENGTH 0x8B84
#define GL_VERTEX_SHADER 0x8B31
#define GL_INCR 0x1E02
#define GL_DYNAMIC_DRAW 0x88E8
#define GL_STATIC_DRAW 0x88E4
#define GL_TEXTURE_CUBE_MAP_POSITIVE_Z 0x8519
#define GL_TEXTURE_CUBE_MAP 0x8513
#define GL_FUNC_SUBTRACT 0x800A
#define GL_FUNC_REVERSE_SUBTRACT 0x800B
#define GL_CONSTANT_COLOR 0x8001
#define GL_DECR_WRAP 0x8508
#define GL_R8 0x8229
#define GL_LINEAR_MIPMAP_LINEAR 0x2703
#define GL_ELEMENT_ARRAY_BUFFER 0x8893
#define GL_SHORT 0x1402
#define GL_DEPTH_TEST 0x0B71
#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Y 0x8518
#define GL_LINK_STATUS 0x8B82
#define GL_TEXTURE_CUBE_MAP_POSITIVE_Y 0x8517
#define GL_SAMPLE_ALPHA_TO_COVERAGE 0x809E
#define GL_RGBA16F 0x881A
#define GL_CONSTANT_ALPHA 0x8003
#define GL_READ_FRAMEBUFFER 0x8CA8
#define GL_TEXTURE0 0x84C0
#define GL_TEXTURE_MIN_LOD 0x813A
#define GL_CLAMP_TO_EDGE 0x812F
#define GL_UNSIGNED_SHORT_5_6_5 0x8363
#define GL_TEXTURE_WRAP_R 0x8072
#define GL_UNSIGNED_SHORT_5_5_5_1 0x8034
#define GL_NEAREST_MIPMAP_NEAREST 0x2700
#define GL_UNSIGNED_SHORT_4_4_4_4 0x8033
#define GL_SRC_ALPHA_SATURATE 0x0308
#define GL_STREAM_DRAW 0x88E0
#define GL_ONE 1
#define GL_NEAREST_MIPMAP_LINEAR 0x2702
#define GL_RGB10_A2 0x8059
#define GL_RGBA8 0x8058
#define GL_SRGB8_ALPHA8 0x8C43
#define GL_RGBA4 0x8056
#define GL_RGB8 0x8051
#define GL_ARRAY_BUFFER 0x8892
#define GL_STENCIL 0x1802
#define GL_TEXTURE_2D 0x0DE1
#define GL_DEPTH 0x1801
#define GL_FRONT 0x0404
#define GL_STENCIL_BUFFER_BIT 0x00000400
#define GL_REPEAT 0x2901
#define GL_RGBA 0x1908
#define GL_TEXTURE_CUBE_MAP_POSITIVE_X 0x8515
#define GL_DECR 0x1E03
#define GL_FRAGMENT_SHADER 0x8B30
#define GL_COMPUTE_SHADER 0x91B9
#define GL_FLOAT 0x1406
#define GL_TEXTURE_MAX_LOD 0x813B
#define GL_DEPTH_COMPONENT 0x1902
#define GL_ONE_MINUS_DST_ALPHA 0x0305
#define GL_COLOR 0x1800
#define GL_TEXTURE_2D_ARRAY 0x8C1A
#define GL_TRIANGLES 0x0004
#define GL_UNSIGNED_BYTE 0x1401
#define GL_TEXTURE_MAG_FILTER 0x2800
#define GL_ONE_MINUS_CONSTANT_ALPHA 0x8004
#define GL_NONE 0
#define GL_SRC_COLOR 0x0300
#define GL_BYTE 0x1400
#define GL_TEXTURE_CUBE_MAP_NEGATIVE_Z 0x851A
#define GL_LINE_STRIP 0x0003
#define GL_TEXTURE_3D 0x806F
#define GL_CW 0x0900
#define GL_LINEAR 0x2601
#define GL_RENDERBUFFER 0x8D41
#define GL_GEQUAL 0x0206
#define GL_COLOR_BUFFER_BIT 0x00004000
#define GL_RGBA32F 0x8814
#define GL_BLEND 0x0BE2
#define GL_ONE_MINUS_SRC_ALPHA 0x0303
#define GL_ONE_MINUS_CONSTANT_COLOR 0x8002
#define GL_TEXTURE_WRAP_T 0x2803
#define GL_TEXTURE_WRAP_S 0x2802
#define GL_TEXTURE_MIN_FILTER 0x2801
#define GL_LINEAR_MIPMAP_NEAREST 0x2701
#define GL_EXTENSIONS 0x1F03
#define GL_NO_ERROR 0
#define GL_REPLACE 0x1E01
#define GL_KEEP 0x1E00
#define GL_CCW 0x0901
#define GL_TEXTURE_CUBE_MAP_NEGATIVE_X 0x8516
#define GL_RGB 0x1907
#define GL_TRIANGLE_STRIP 0x0005
#define GL_FALSE 0
#define GL_ZERO 0
#define GL_CULL_FACE 0x0B44
#define GL_INVERT 0x150A
#define GL_INT 0x1404
#define GL_UNSIGNED_INT 0x1405
#define GL_UNSIGNED_SHORT 0x1403
#define GL_NEAREST 0x2600
#define GL_SCISSOR_TEST 0x0C11
#define GL_LEQUAL 0x0203
#define GL_STENCIL_TEST 0x0B90
#define GL_DITHER 0x0BD0
#define GL_DEPTH_COMPONENT32F 0x8CAC
#define GL_EQUAL 0x0202
#define GL_FRAMEBUFFER 0x8D40
#define GL_RGB5 0x8050
#define GL_LINES 0x0001
#define GL_DEPTH_BUFFER_BIT 0x00000100
#define GL_SRC_ALPHA 0x0302
#define GL_INCR_WRAP 0x8507
#define GL_LESS 0x0201
#define GL_MULTISAMPLE 0x809D
#define GL_FRAMEBUFFER_BINDING 0x8CA6
#define GL_BACK 0x0405
#define GL_ALWAYS 0x0207
#define GL_FUNC_ADD 0x8006
#define GL_ONE_MINUS_DST_COLOR 0x0307
#define GL_NOTEQUAL 0x0205
#define GL_DST_COLOR 0x0306
#define GL_COMPILE_STATUS 0x8B81
#define GL_RED 0x1903
#define GL_DST_ALPHA 0x0304
#define GL_RGB5_A1 0x8057
#define GL_GREATER 0x0204
#define GL_POLYGON_OFFSET_FILL 0x8037
#define GL_TRUE 1
#define GL_NEVER 0x0200
#define GL_POINTS 0x0000
#define GL_ONE_MINUS_SRC_COLOR 0x0301
#define GL_MIRRORED_REPEAT 0x8370
#define GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS 0x8B4D
#define GL_R11F_G11F_B10F 0x8C3A
#define GL_UNSIGNED_INT_10F_11F_11F_REV 0x8C3B
#define GL_RGB9_E5 0x8C3D
#define GL_UNSIGNED_INT_5_9_9_9_REV 0x8C3E
#define GL_RGBA32UI 0x8D70
#define GL_RGB32UI 0x8D71
#define GL_RGBA16UI 0x8D76
#define GL_RGB16UI 0x8D77
#define GL_RGBA8UI 0x8D7C
#define GL_RGB8UI 0x8D7D
#define GL_RGBA32I 0x8D82
#define GL_RGB32I 0x8D83
#define GL_RGBA16I 0x8D88
#define GL_RGB16I 0x8D89
#define GL_RGBA8I 0x8D8E
#define GL_RGB8I 0x8D8F
#define GL_RED_INTEGER 0x8D94
#define GL_RG 0x8227
#define GL_RG_INTEGER 0x8228
#define GL_R8 0x8229
#define GL_R16 0x822A
#define GL_RG8 0x822B
#define GL_RG16 0x822C
#define GL_R16F 0x822D
#define GL_R32F 0x822E
#define GL_RG16F 0x822F
#define GL_RG32F 0x8230
#define GL_R8I 0x8231
#define GL_R8UI 0x8232
#define GL_R16I 0x8233
#define GL_R16UI 0x8234
#define GL_R32I 0x8235
#define GL_R32UI 0x8236
#define GL_RG8I 0x8237
#define GL_RG8UI 0x8238
#define GL_RG16I 0x8239
#define GL_RG16UI 0x823A
#define GL_RG32I 0x823B
#define GL_RG32UI 0x823C
#define GL_RGBA_INTEGER 0x8D99
#define GL_R8_SNORM 0x8F94
#define GL_RG8_SNORM 0x8F95
#define GL_RGB8_SNORM 0x8F96
#define GL_RGBA8_SNORM 0x8F97
#define GL_R16_SNORM 0x8F98
#define GL_RG16_SNORM 0x8F99
#define GL_RGB16_SNORM 0x8F9A
#define GL_RGBA16_SNORM 0x8F9B
#define GL_RGBA16 0x805B
#define GL_MAX_TEXTURE_SIZE 0x0D33
#define GL_MAX_CUBE_MAP_TEXTURE_SIZE 0x851C
#define GL_MAX_3D_TEXTURE_SIZE 0x8073
#define GL_MAX_ARRAY_TEXTURE_LAYERS 0x88FF
#define GL_MAX_VERTEX_ATTRIBS 0x8869
#define GL_CLAMP_TO_BORDER 0x812D
#define GL_TEXTURE_BORDER_COLOR 0x1004
#define GL_CURRENT_PROGRAM 0x8B8D
#define GL_MAX_VERTEX_UNIFORM_COMPONENTS 0x8B4A
#define GL_UNPACK_ALIGNMENT 0x0CF5
#define GL_FRAMEBUFFER_SRGB 0x8DB9
#define GL_TEXTURE_COMPARE_MODE 0x884C
#define GL_TEXTURE_COMPARE_FUNC 0x884D
#define GL_COMPARE_REF_TO_TEXTURE 0x884E
#define GL_TEXTURE_CUBE_MAP_SEAMLESS 0x884F
#define GL_TEXTURE_MAX_LEVEL 0x813D
#define GL_FRAMEBUFFER_UNDEFINED 0x8219
#define GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT 0x8CD6
#define GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT 0x8CD7
#define GL_FRAMEBUFFER_UNSUPPORTED 0x8CDD
#define GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE 0x8D56
#define GL_MAJOR_VERSION 0x821B
#define GL_MINOR_VERSION 0x821C
#define GL_TEXTURE_2D_MULTISAMPLE 0x9100
#define GL_TEXTURE_2D_MULTISAMPLE_ARRAY 0x9102
#define GL_SHADER_STORAGE_BARRIER_BIT 0x2000
#define GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT 0x00000001
#define GL_ELEMENT_ARRAY_BARRIER_BIT 0x00000002
#define GL_TEXTURE_FETCH_BARRIER_BIT 0x00000008
#define GL_SHADER_IMAGE_ACCESS_BARRIER_BIT 0x00000020
#define GL_FRAMEBUFFER_BARRIER_BIT 0x00000400
#define GL_MIN 0x8007
#define GL_MAX 0x8008
#define GL_WRITE_ONLY 0x88B9
#define GL_READ_WRITE 0x88BA
#define GL_MAX_DRAW_BUFFERS 0x8824
#define GL_MAX_TEXTURE_IMAGE_UNITS 0x8872
#define GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS 0x90DD
#define GL_MAX_IMAGE_UNITS 0x8F38
#endif
#ifndef GL_UNSIGNED_INT_2_10_10_10_REV
#define GL_UNSIGNED_INT_2_10_10_10_REV 0x8368
#endif
#ifndef GL_UNSIGNED_INT_24_8
#define GL_UNSIGNED_INT_24_8 0x84FA
#endif
#ifndef GL_TEXTURE_MAX_ANISOTROPY_EXT
#define GL_TEXTURE_MAX_ANISOTROPY_EXT 0x84FE
#endif
#ifndef GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT
#define GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT 0x84FF
#endif
#ifndef GL_COMPRESSED_RGBA_S3TC_DXT1_EXT
#define GL_COMPRESSED_RGBA_S3TC_DXT1_EXT 0x83F1
#endif
#ifndef GL_COMPRESSED_RGBA_S3TC_DXT3_EXT
#define GL_COMPRESSED_RGBA_S3TC_DXT3_EXT 0x83F2
#endif
#ifndef GL_COMPRESSED_RGBA_S3TC_DXT5_EXT
#define GL_COMPRESSED_RGBA_S3TC_DXT5_EXT 0x83F3
#endif
#ifndef GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT
#define GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT 0x8C4F
#endif
#ifndef GL_COMPRESSED_RED_RGTC1
#define GL_COMPRESSED_RED_RGTC1 0x8DBB
#endif
#ifndef GL_COMPRESSED_SIGNED_RED_RGTC1
#define GL_COMPRESSED_SIGNED_RED_RGTC1 0x8DBC
#endif
#ifndef GL_COMPRESSED_RED_GREEN_RGTC2
#define GL_COMPRESSED_RED_GREEN_RGTC2 0x8DBD
#endif
#ifndef GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2
#define GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2 0x8DBE
#endif
#ifndef GL_COMPRESSED_RGBA_BPTC_UNORM_ARB
#define GL_COMPRESSED_RGBA_BPTC_UNORM_ARB 0x8E8C
#endif
#ifndef GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_ARB
#define GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_ARB 0x8E8D
#endif
#ifndef GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_ARB
#define GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_ARB 0x8E8E
#endif
#ifndef GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_ARB
#define GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_ARB 0x8E8F
#endif
#ifndef GL_COMPRESSED_RGB8_ETC2
#define GL_COMPRESSED_RGB8_ETC2 0x9274
#endif
#ifndef GL_COMPRESSED_SRGB8_ETC2
#define GL_COMPRESSED_SRGB8_ETC2 0x9275
#endif
#ifndef GL_COMPRESSED_RGBA8_ETC2_EAC
#define GL_COMPRESSED_RGBA8_ETC2_EAC 0x9278
#endif
#ifndef GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC
#define GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC 0x9279
#endif
#ifndef GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2
#define GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 0x9276
#endif
#ifndef GL_COMPRESSED_R11_EAC
#define GL_COMPRESSED_R11_EAC 0x9270
#endif
#ifndef GL_COMPRESSED_SIGNED_R11_EAC
#define GL_COMPRESSED_SIGNED_R11_EAC 0x9271
#endif
#ifndef GL_COMPRESSED_RG11_EAC
#define GL_COMPRESSED_RG11_EAC 0x9272
#endif
#ifndef GL_COMPRESSED_SIGNED_RG11_EAC
#define GL_COMPRESSED_SIGNED_RG11_EAC 0x9273
#endif
#ifndef GL_COMPRESSED_RGBA_ASTC_4x4_KHR
#define GL_COMPRESSED_RGBA_ASTC_4x4_KHR 0x93B0
#endif
#ifndef GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR
#define GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR 0x93D0
#endif
#ifndef GL_DEPTH24_STENCIL8
#define GL_DEPTH24_STENCIL8 0x88F0
#endif
#ifndef GL_HALF_FLOAT
#define GL_HALF_FLOAT 0x140B
#endif
#ifndef GL_DEPTH_STENCIL
#define GL_DEPTH_STENCIL 0x84F9
#endif
#ifndef GL_LUMINANCE
#define GL_LUMINANCE 0x1909
#endif
#ifndef GL_COMPUTE_SHADER
#define GL_COMPUTE_SHADER 0x91B9
#endif
#ifndef _SG_GL_CHECK_ERROR
#if defined(__EMSCRIPTEN__)
// generally turn off glGetError() on WASM, it's too big a performance hit
// and WebGL provides much better diagnostics anyway
#define _SG_GL_CHECK_ERROR()
#elif defined(SOKOL_DEBUG)
// make sure that glGetError() is only called in debug mode
#define _SG_GL_CHECK_ERROR() { SOKOL_ASSERT(glGetError() == GL_NO_ERROR); }
#else
#define _SG_GL_CHECK_ERROR()
#endif
#endif
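// Typical usage (sketch): place after GL calls so that errors assert close
// to their source in debug builds and compile away otherwise:
//
//   glBindBuffer(GL_ARRAY_BUFFER, gl_buf);
//   _SG_GL_CHECK_ERROR();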
// make some GL constants generally available to simplify compilation,
// use of those constants will be filtered by runtime flags
#ifndef GL_SHADER_STORAGE_BUFFER
#define GL_SHADER_STORAGE_BUFFER 0x90D2
#endif
#endif
#if defined(SOKOL_GLES3)
// on WebGL2, GL_FRAMEBUFFER_UNDEFINED technically doesn't exist (it is defined
// in the Emscripten headers, but may not exist in other WebGL2 shims)
// see: https://github.com/floooh/sokol/pull/933
#ifndef GL_FRAMEBUFFER_UNDEFINED
#define GL_FRAMEBUFFER_UNDEFINED 0x8219
#endif
#endif
// ███████ ████████ ██████ ██ ██ ██████ ████████ ███████
// ██ ██ ██ ██ ██ ██ ██ ██ ██
// ███████ ██ ██████ ██ ██ ██ ██ ███████
// ██ ██ ██ ██ ██ ██ ██ ██ ██
// ███████ ██ ██ ██ ██████ ██████ ██ ███████
//
// >>structs
typedef struct { int x, y, w, h; } _sg_recti_t;
typedef struct { int width, height; } _sg_dimi_t;
// resource pool slots
typedef struct {
    uint32_t id;
    uint32_t uninit_count;
    sg_resource_state state;
} _sg_slot_t;
// resource pool housekeeping struct
typedef struct {
    int size;
    int queue_top;
    uint32_t* gen_ctrs;
    int* free_queue;
} _sg_pool_t;
// resource hazard tracking struct
typedef struct {
    int num_slots;
    int cur_slot;
    uint32_t* slots; // tracked unique resource ids
    uint32_t occupy_num_bytes; // size of occupy_bits array in bytes
    uint8_t* occupy_bits; // one set bit for each unique resource (idx = (id & 0xFFFF) >> 3)
} _sg_track_t;
// resource func forward decls
struct _sg_buffer_s;
struct _sg_image_s;
struct _sg_sampler_s;
struct _sg_shader_s;
struct _sg_pipeline_s;
struct _sg_view_s;
// a general resource slot reference useful for caches
typedef struct _sg_sref_s {
    uint32_t id;
    uint32_t uninit_count;
} _sg_sref_t;
// safe (in debug mode) internal resource references
typedef struct _sg_buffer_ref_s {
    struct _sg_buffer_s* ptr;
    _sg_sref_t sref;
} _sg_buffer_ref_t;
typedef struct _sg_image_ref_s {
    struct _sg_image_s* ptr;
    _sg_sref_t sref;
} _sg_image_ref_t;
typedef struct _sg_sampler_ref_s {
    struct _sg_sampler_s* ptr;
    _sg_sref_t sref;
} _sg_sampler_ref_t;
typedef struct _sg_shader_ref_s {
    struct _sg_shader_s* ptr;
    _sg_sref_t sref;
} _sg_shader_ref_t;
typedef struct _sg_pipeline_ref_s {
    struct _sg_pipeline_s* ptr;
    _sg_sref_t sref;
} _sg_pipeline_ref_t;
typedef struct _sg_view_ref_s {
    struct _sg_view_s* ptr;
    _sg_sref_t sref;
} _sg_view_ref_t;
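// NOTE (sketch of the intended invariant): a ref is still valid when its
// sref matches the current slot state of the pointed-to resource, e.g.:
//
//   bool valid = ref.ptr
//       && (ref.sref.id == ref.ptr->slot.id)
//       && (ref.sref.uninit_count == ref.ptr->slot.uninit_count);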
// constants
enum {
    _SG_STRING_SIZE = 32,
    _SG_SLOT_SHIFT = 16,
    _SG_SLOT_MASK = (1<<_SG_SLOT_SHIFT)-1,
    _SG_MAX_POOL_SIZE = (1<<_SG_SLOT_SHIFT),
    _SG_DEFAULT_BUFFER_POOL_SIZE = 128,
    _SG_DEFAULT_IMAGE_POOL_SIZE = 128,
    _SG_DEFAULT_SAMPLER_POOL_SIZE = 64,
    _SG_DEFAULT_SHADER_POOL_SIZE = 32,
    _SG_DEFAULT_PIPELINE_POOL_SIZE = 64,
    _SG_DEFAULT_VIEW_POOL_SIZE = 256,
    _SG_DEFAULT_UB_SIZE = 4 * 1024 * 1024,
    _SG_DEFAULT_MAX_COMMIT_LISTENERS = 1024,
    _SG_DEFAULT_WGPU_BINDGROUP_CACHE_SIZE = 1024,
    _SG_DEFAULT_VK_COPY_STAGING_SIZE = (4 * 1024 * 1024),
    _SG_DEFAULT_VK_STREAM_STAGING_SIZE = (16 * 1024 * 1024),
    _SG_DEFAULT_VK_DESCRIPTOR_BUFFER_SIZE = (16 * 1024 * 1024),
    _SG_MAX_STORAGEBUFFER_BINDINGS_PER_STAGE = SG_MAX_VIEW_BINDSLOTS,
    _SG_MAX_STORAGEIMAGE_BINDINGS_PER_STAGE = SG_MAX_VIEW_BINDSLOTS,
    _SG_MAX_TEXTURE_BINDINGS_PER_STAGE = SG_MAX_VIEW_BINDSLOTS,
    _SG_MAX_UNIFORMBLOCK_BINDINGS_PER_STAGE = 8,
};
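// NOTE (sketch): a resource id packs a per-slot generation counter into the
// upper bits and the pool slot index into the lower _SG_SLOT_SHIFT bits:
//
//   int slot_index = (int)(id & _SG_SLOT_MASK);
//   uint32_t gen_ctr = id >> _SG_SLOT_SHIFT;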
// fixed-size string
typedef struct {
    char buf[_SG_STRING_SIZE];
} _sg_str_t;
typedef struct {
    int size;
    int append_pos;
    bool append_overflow;
    uint32_t update_frame_index;
    uint32_t append_frame_index;
    int num_slots;
    int active_slot;
    sg_buffer_usage usage;
} _sg_buffer_common_t;
typedef struct {
    uint32_t upd_frame_index;
    int num_slots;
    int active_slot;
    sg_image_type type;
    int width;
    int height;
    int num_slices;
    int num_mipmaps;
    sg_image_usage usage;
    sg_pixel_format pixel_format;
    int sample_count;
} _sg_image_common_t;
typedef struct {
    sg_filter min_filter;
    sg_filter mag_filter;
    sg_filter mipmap_filter;
    sg_wrap wrap_u;
    sg_wrap wrap_v;
    sg_wrap wrap_w;
    float min_lod;
    float max_lod;
    sg_border_color border_color;
    sg_compare_func compare;
    uint32_t max_anisotropy;
} _sg_sampler_common_t;
typedef struct {
    sg_shader_attr_base_type base_type;
} _sg_shader_attr_t;
typedef struct {
    sg_shader_stage stage;
    uint32_t size;
} _sg_shader_uniform_block_t;
typedef struct {
    sg_shader_stage stage;
    sg_view_type view_type;
    sg_image_type image_type;
    sg_pixel_format access_format;
    sg_image_sample_type sample_type;
    bool sbuf_readonly;
    bool simg_writeonly;
    bool multisampled;
} _sg_shader_view_t;
typedef struct {
    sg_shader_stage stage;
    sg_sampler_type sampler_type;
} _sg_shader_sampler_t;
typedef struct {
    sg_shader_stage stage;
    uint8_t view_slot;
    uint8_t sampler_slot;
} _sg_shader_texture_sampler_t;
typedef struct {
    uint32_t required_bindings_and_uniforms;
    bool is_compute;
    _sg_shader_attr_t attrs[SG_MAX_VERTEX_ATTRIBUTES];
    _sg_shader_uniform_block_t uniform_blocks[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
    _sg_shader_view_t views[SG_MAX_VIEW_BINDSLOTS];
    _sg_shader_sampler_t samplers[SG_MAX_SAMPLER_BINDSLOTS];
    _sg_shader_texture_sampler_t texture_samplers[SG_MAX_TEXTURE_SAMPLER_PAIRS];
} _sg_shader_common_t;
typedef struct {
    bool vertex_buffer_layout_active[SG_MAX_VERTEXBUFFER_BINDSLOTS];
    bool use_instanced_draw;
    bool is_compute;
    uint32_t required_bindings_and_uniforms;
    _sg_shader_ref_t shader;
    sg_vertex_layout_state layout;
    sg_depth_state depth;
    sg_stencil_state stencil;
    int color_count;
    sg_color_target_state colors[SG_MAX_COLOR_ATTACHMENTS];
    sg_primitive_type primitive_type;
    sg_index_type index_type;
    sg_cull_mode cull_mode;
    sg_face_winding face_winding;
    int sample_count;
    sg_color blend_color;
    bool alpha_to_coverage_enabled;
} _sg_pipeline_common_t;
typedef struct {
    _sg_buffer_ref_t ref;
    int offset;
} _sg_buffer_view_common_t;
typedef struct {
    _sg_image_ref_t ref;
    int mip_level;
    int slice;
    int mip_level_count;
    int slice_count;
} _sg_image_view_common_t;
typedef struct {
    sg_view_type type;
    _sg_buffer_view_common_t buf;
    _sg_image_view_common_t img;
} _sg_view_common_t;
#if defined(SOKOL_DUMMY_BACKEND)
typedef struct _sg_buffer_s {
    _sg_slot_t slot;
    _sg_buffer_common_t cmn;
} _sg_dummy_buffer_t;
typedef _sg_dummy_buffer_t _sg_buffer_t;
typedef struct _sg_image_s {
    _sg_slot_t slot;
    _sg_image_common_t cmn;
} _sg_dummy_image_t;
typedef _sg_dummy_image_t _sg_image_t;
typedef struct _sg_sampler_s {
    _sg_slot_t slot;
    _sg_sampler_common_t cmn;
} _sg_dummy_sampler_t;
typedef _sg_dummy_sampler_t _sg_sampler_t;
typedef struct _sg_shader_s {
    _sg_slot_t slot;
    _sg_shader_common_t cmn;
} _sg_dummy_shader_t;
typedef _sg_dummy_shader_t _sg_shader_t;
typedef struct _sg_pipeline_s {
    _sg_slot_t slot;
    _sg_pipeline_common_t cmn;
} _sg_dummy_pipeline_t;
typedef _sg_dummy_pipeline_t _sg_pipeline_t;
typedef struct _sg_view_s {
    _sg_slot_t slot;
    _sg_view_common_t cmn;
} _sg_dummy_view_t;
typedef _sg_dummy_view_t _sg_view_t;
#elif defined(_SOKOL_ANY_GL)
typedef enum {
    _SG_GL_GPUDIRTY_VERTEXBUFFER = (1<<0),
    _SG_GL_GPUDIRTY_INDEXBUFFER = (1<<1),
    _SG_GL_GPUDIRTY_STORAGEBUFFER = (1<<2),
    _SG_GL_GPUDIRTY_TEXTURE = (1<<3),
    _SG_GL_GPUDIRTY_STORAGEIMAGE = (1<<4),
    _SG_GL_GPUDIRTY_ATTACHMENT = (1<<5),
    _SG_GL_GPUDIRTY_BUFFER_ALL = _SG_GL_GPUDIRTY_VERTEXBUFFER | _SG_GL_GPUDIRTY_INDEXBUFFER | _SG_GL_GPUDIRTY_STORAGEBUFFER,
    _SG_GL_GPUDIRTY_IMAGE_ALL = _SG_GL_GPUDIRTY_TEXTURE | _SG_GL_GPUDIRTY_STORAGEIMAGE | _SG_GL_GPUDIRTY_ATTACHMENT,
} _sg_gl_gpudirty_t;
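// NOTE (sketch, inferred from the flag names): the dirty flags support
// memory-barrier tracking for GL compute, e.g. after a compute pass has
// written a storage buffer, a glMemoryBarrier() is expected before the
// buffer is used for a dirty usage again, roughly:
//
//   if (buf->gl.gpu_dirty_flags & _SG_GL_GPUDIRTY_VERTEXBUFFER) {
//       glMemoryBarrier(GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT);
//       buf->gl.gpu_dirty_flags &= (uint8_t)~_SG_GL_GPUDIRTY_VERTEXBUFFER;
//   }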
typedef struct _sg_buffer_s {
    _sg_slot_t slot;
    _sg_buffer_common_t cmn;
    struct {
        GLuint buf[SG_NUM_INFLIGHT_FRAMES];
        uint8_t gpu_dirty_flags; // combination of _sg_gl_gpudirty_t flags
        bool injected; // if true, external buffers were injected with sg_buffer_desc.gl_buffers
    } gl;
} _sg_gl_buffer_t;
typedef _sg_gl_buffer_t _sg_buffer_t;
typedef struct _sg_image_s {
    _sg_slot_t slot;
    _sg_image_common_t cmn;
    struct {
        GLenum target;
        GLuint tex[SG_NUM_INFLIGHT_FRAMES];
        uint8_t gpu_dirty_flags; // combination of _sg_gl_gpudirty_t flags
        bool injected; // if true, external textures were injected with sg_image_desc.gl_textures
    } gl;
} _sg_gl_image_t;
typedef _sg_gl_image_t _sg_image_t;
typedef struct _sg_sampler_s {
    _sg_slot_t slot;
    _sg_sampler_common_t cmn;
    struct {
        GLuint smp;
        bool injected; // if true, an external sampler was injected with sg_sampler_desc.gl_sampler
    } gl;
} _sg_gl_sampler_t;
typedef _sg_gl_sampler_t _sg_sampler_t;
typedef struct {
    GLint gl_loc;
    sg_uniform_type type;
    uint16_t count;
    uint16_t offset;
} _sg_gl_uniform_t;
typedef struct {
    int num_uniforms;
    _sg_gl_uniform_t uniforms[SG_MAX_UNIFORMBLOCK_MEMBERS];
} _sg_gl_uniform_block_t;
typedef struct {
    _sg_str_t name;
} _sg_gl_shader_attr_t;
typedef struct _sg_shader_s {
    _sg_slot_t slot;
    _sg_shader_common_t cmn;
    struct {
        GLuint prog;
        _sg_gl_shader_attr_t attrs[SG_MAX_VERTEX_ATTRIBUTES];
        _sg_gl_uniform_block_t uniform_blocks[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        uint8_t sbuf_binding[SG_MAX_VIEW_BINDSLOTS];
        uint8_t simg_binding[SG_MAX_VIEW_BINDSLOTS];
        int8_t tex_slot[SG_MAX_TEXTURE_SAMPLER_PAIRS]; // GL texture unit index
    } gl;
} _sg_gl_shader_t;
typedef _sg_gl_shader_t _sg_shader_t;
typedef struct {
    int8_t vb_index; // -1 if attr is not enabled
    int8_t divisor; // -1 if not initialized
    uint8_t stride;
    uint8_t size;
    uint8_t normalized;
    int offset;
    GLenum type;
    sg_shader_attr_base_type base_type;
} _sg_gl_attr_t;
typedef struct _sg_pipeline_s {
    _sg_slot_t slot;
    _sg_pipeline_common_t cmn;
    struct {
        _sg_gl_attr_t attrs[SG_MAX_VERTEX_ATTRIBUTES];
        sg_depth_state depth;
        sg_stencil_state stencil;
        sg_primitive_type primitive_type;
        sg_blend_state blend;
        sg_color_mask color_write_mask[SG_MAX_COLOR_ATTACHMENTS];
        sg_cull_mode cull_mode;
        sg_face_winding face_winding;
        int sample_count;
        bool alpha_to_coverage_enabled;
    } gl;
} _sg_gl_pipeline_t;
typedef _sg_gl_pipeline_t _sg_pipeline_t;
typedef struct _sg_view_s {
    _sg_slot_t slot;
    _sg_view_common_t cmn;
    struct {
        GLuint tex_view[SG_NUM_INFLIGHT_FRAMES]; // only if sg_features.gl_texture_views
        GLuint msaa_render_buffer; // only if !msaa_texture_bindings
        GLuint msaa_resolve_frame_buffer;
    } gl;
} _sg_gl_view_t;
typedef _sg_gl_view_t _sg_view_t;
typedef struct {
    _sg_gl_attr_t gl_attr;
    GLuint gl_vbuf;
} _sg_gl_cache_attr_t;
typedef struct {
    GLenum target;
    GLuint texture;
    GLuint sampler;
} _sg_gl_cache_texture_sampler_bind_slot;
#define _SG_GL_MAX_SBUF_BINDINGS (_SG_MAX_STORAGEBUFFER_BINDINGS_PER_STAGE)
#define _SG_GL_MAX_SIMG_BINDINGS (_SG_MAX_STORAGEIMAGE_BINDINGS_PER_STAGE)
#define _SG_GL_MAX_TEX_SMP_BINDINGS (SG_MAX_TEXTURE_SAMPLER_PAIRS)
typedef struct {
    sg_depth_state depth;
    sg_stencil_state stencil;
    sg_blend_state blend;
    sg_color_mask color_write_mask[SG_MAX_COLOR_ATTACHMENTS];
    sg_cull_mode cull_mode;
    sg_face_winding face_winding;
    bool polygon_offset_enabled;
    int sample_count;
    sg_color blend_color;
    bool alpha_to_coverage_enabled;
    _sg_gl_cache_attr_t attrs[SG_MAX_VERTEX_ATTRIBUTES];
    GLuint vertex_buffer;
    GLuint index_buffer;
    GLuint storage_buffer; // general bind point
    GLuint storage_buffers[_SG_GL_MAX_SBUF_BINDINGS];
    int storage_buffer_offsets[_SG_GL_MAX_SBUF_BINDINGS];
    GLuint stored_vertex_buffer;
    GLuint stored_index_buffer;
    GLuint stored_storage_buffer;
    GLuint prog;
    _sg_gl_cache_texture_sampler_bind_slot texture_samplers[_SG_GL_MAX_TEX_SMP_BINDINGS];
    _sg_gl_cache_texture_sampler_bind_slot stored_texture_sampler;
    int cur_ib_offset;
    GLenum cur_primitive_type;
    GLenum cur_index_type;
    GLenum cur_active_texture;
    _sg_sref_t cur_pip;
} _sg_gl_cache_t;
typedef struct {
    bool valid;
    GLuint vao; // global mutated vertex-array-object
    GLuint fb; // global mutated framebuffer
    _sg_gl_cache_t cache;
    bool ext_anisotropic;
    GLint max_anisotropy;
    sg_store_action color_store_actions[SG_MAX_COLOR_ATTACHMENTS];
    sg_store_action depth_store_action;
    sg_store_action stencil_store_action;
    #if defined(_SOKOL_USE_WIN32_GL_LOADER)
    HINSTANCE opengl32_dll;
    #endif
} _sg_gl_backend_t;
#elif defined(SOKOL_D3D11)
typedef struct _sg_buffer_s {
    _sg_slot_t slot;
    _sg_buffer_common_t cmn;
    struct {
        ID3D11Buffer* buf;
    } d3d11;
} _sg_d3d11_buffer_t;
typedef _sg_d3d11_buffer_t _sg_buffer_t;
typedef struct _sg_image_s {
    _sg_slot_t slot;
    _sg_image_common_t cmn;
    struct {
        DXGI_FORMAT format;
        ID3D11Texture2D* tex2d;
        ID3D11Texture3D* tex3d;
        ID3D11Resource* res; // either tex2d or tex3d
    } d3d11;
} _sg_d3d11_image_t;
typedef _sg_d3d11_image_t _sg_image_t;
typedef struct _sg_sampler_s {
    _sg_slot_t slot;
    _sg_sampler_common_t cmn;
    struct {
        ID3D11SamplerState* smp;
    } d3d11;
} _sg_d3d11_sampler_t;
typedef _sg_d3d11_sampler_t _sg_sampler_t;
typedef struct {
    _sg_str_t sem_name;
    int sem_index;
} _sg_d3d11_shader_attr_t;
#define _SG_D3D11_MAX_TEXTUREARRAY_LAYERS (2048)
#define _SG_D3D11_MAX_TEXTURE_SUBRESOURCES (SG_MAX_MIPMAPS * _SG_D3D11_MAX_TEXTUREARRAY_LAYERS)
#define _SG_D3D11_MAX_STAGE_UB_BINDINGS (_SG_MAX_UNIFORMBLOCK_BINDINGS_PER_STAGE)
#define _SG_D3D11_MAX_STAGE_SRV_BINDINGS (SG_MAX_VIEW_BINDSLOTS)
#define _SG_D3D11_MAX_STAGE_UAV_BINDINGS (SG_MAX_VIEW_BINDSLOTS)
#define _SG_D3D11_MAX_STAGE_SMP_BINDINGS (SG_MAX_SAMPLER_BINDSLOTS)
typedef struct _sg_shader_s {
    _sg_slot_t slot;
    _sg_shader_common_t cmn;
    struct {
        _sg_d3d11_shader_attr_t attrs[SG_MAX_VERTEX_ATTRIBUTES];
        ID3D11VertexShader* vs;
        ID3D11PixelShader* fs;
        ID3D11ComputeShader* cs;
        void* vs_blob;
        size_t vs_blob_length;
        uint8_t ub_register_b_n[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        uint8_t view_register_t_n[SG_MAX_VIEW_BINDSLOTS];
        uint8_t view_register_u_n[SG_MAX_VIEW_BINDSLOTS];
        uint8_t smp_register_s_n[SG_MAX_SAMPLER_BINDSLOTS];
        ID3D11Buffer* all_cbufs[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        ID3D11Buffer* vs_cbufs[_SG_D3D11_MAX_STAGE_UB_BINDINGS];
        ID3D11Buffer* fs_cbufs[_SG_D3D11_MAX_STAGE_UB_BINDINGS];
        ID3D11Buffer* cs_cbufs[_SG_D3D11_MAX_STAGE_UB_BINDINGS];
    } d3d11;
} _sg_d3d11_shader_t;
typedef _sg_d3d11_shader_t _sg_shader_t;
typedef struct _sg_pipeline_s {
    _sg_slot_t slot;
    _sg_pipeline_common_t cmn;
    struct {
        UINT stencil_ref;
        UINT vb_strides[SG_MAX_VERTEXBUFFER_BINDSLOTS];
        D3D_PRIMITIVE_TOPOLOGY topology;
        DXGI_FORMAT index_format;
        ID3D11InputLayout* il;
        ID3D11RasterizerState* rs;
        ID3D11DepthStencilState* dss;
        ID3D11BlendState* bs;
    } d3d11;
} _sg_d3d11_pipeline_t;
typedef _sg_d3d11_pipeline_t _sg_pipeline_t;
typedef struct _sg_view_s {
    _sg_slot_t slot;
    _sg_view_common_t cmn;
    struct {
        ID3D11ShaderResourceView* srv;
        ID3D11UnorderedAccessView* uav;
        ID3D11RenderTargetView* rtv;
        ID3D11DepthStencilView* dsv;
    } d3d11;
} _sg_d3d11_view_t;
typedef _sg_d3d11_view_t _sg_view_t;
typedef struct {
    bool valid;
    ID3D11Device* dev;
    ID3D11DeviceContext* ctx;
    struct {
        ID3D11RenderTargetView* render_view;
        ID3D11RenderTargetView* resolve_view;
    } cur_swapchain;
    // on-demand loaded d3dcompiler_47.dll handles
    HINSTANCE d3dcompiler_dll;
    bool d3dcompiler_dll_load_failed;
    pD3DCompile D3DCompile_func;
    // static bindings arrays
    struct {
        ID3D11Buffer* vbs[SG_MAX_VERTEXBUFFER_BINDSLOTS];
        UINT vb_offsets[SG_MAX_VERTEXBUFFER_BINDSLOTS];
        ID3D11ShaderResourceView* vs_srvs[_SG_D3D11_MAX_STAGE_SRV_BINDINGS];
        ID3D11ShaderResourceView* fs_srvs[_SG_D3D11_MAX_STAGE_SRV_BINDINGS];
        ID3D11ShaderResourceView* cs_srvs[_SG_D3D11_MAX_STAGE_SRV_BINDINGS];
        ID3D11UnorderedAccessView* cs_uavs[_SG_D3D11_MAX_STAGE_UAV_BINDINGS];
        ID3D11SamplerState* vs_smps[_SG_D3D11_MAX_STAGE_SMP_BINDINGS];
        ID3D11SamplerState* fs_smps[_SG_D3D11_MAX_STAGE_SMP_BINDINGS];
        ID3D11SamplerState* cs_smps[_SG_D3D11_MAX_STAGE_SMP_BINDINGS];
    } bnd;
    // global subresourcedata array for texture updates
    D3D11_SUBRESOURCE_DATA subres_data[_SG_D3D11_MAX_TEXTURE_SUBRESOURCES];
} _sg_d3d11_backend_t;
#elif defined(SOKOL_METAL)
#if defined(_SG_TARGET_MACOS) || defined(_SG_TARGET_IOS_SIMULATOR)
#define _SG_MTL_UB_ALIGN (256)
#else
#define _SG_MTL_UB_ALIGN (16)
#endif
#define _SG_MTL_INVALID_SLOT_INDEX (0)
typedef struct {
    uint32_t frame_index; // frame index at which it is safe to release this resource
    int slot_index;
} _sg_mtl_release_item_t;
typedef struct {
    NSMutableArray* pool;
    int num_slots;
    int free_queue_top;
    int* free_queue;
    int release_queue_front;
    int release_queue_back;
    _sg_mtl_release_item_t* release_queue;
} _sg_mtl_idpool_t;
typedef struct _sg_buffer_s {
    _sg_slot_t slot;
    _sg_buffer_common_t cmn;
    struct {
        int buf[SG_NUM_INFLIGHT_FRAMES]; // index into _sg_mtl_pool
    } mtl;
} _sg_mtl_buffer_t;
typedef _sg_mtl_buffer_t _sg_buffer_t;
typedef struct _sg_image_s {
    _sg_slot_t slot;
    _sg_image_common_t cmn;
    struct {
        int tex[SG_NUM_INFLIGHT_FRAMES];
    } mtl;
} _sg_mtl_image_t;
typedef _sg_mtl_image_t _sg_image_t;
typedef struct _sg_sampler_s {
    _sg_slot_t slot;
    _sg_sampler_common_t cmn;
    struct {
        int sampler_state;
    } mtl;
} _sg_mtl_sampler_t;
typedef _sg_mtl_sampler_t _sg_sampler_t;
typedef struct {
    int mtl_lib;
    int mtl_func;
} _sg_mtl_shader_func_t;
typedef struct _sg_shader_s {
    _sg_slot_t slot;
    _sg_shader_common_t cmn;
    struct {
        _sg_mtl_shader_func_t vertex_func;
        _sg_mtl_shader_func_t fragment_func;
        _sg_mtl_shader_func_t compute_func;
        MTLSize threads_per_threadgroup;
        uint8_t ub_buffer_n[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        uint8_t view_buffer_texture_n[SG_MAX_VIEW_BINDSLOTS];
        uint8_t smp_sampler_n[SG_MAX_SAMPLER_BINDSLOTS];
    } mtl;
} _sg_mtl_shader_t;
typedef _sg_mtl_shader_t _sg_shader_t;
typedef struct _sg_pipeline_s {
    _sg_slot_t slot;
    _sg_pipeline_common_t cmn;
    struct {
        MTLPrimitiveType prim_type;
        int index_size;
        MTLIndexType index_type;
        MTLCullMode cull_mode;
        MTLWinding winding;
        uint32_t stencil_ref;
        MTLSize threads_per_threadgroup;
        int cps; // MTLComputePipelineState
        int rps; // MTLRenderPipelineState
        int dss; // MTLDepthStencilState
    } mtl;
} _sg_mtl_pipeline_t;
typedef _sg_mtl_pipeline_t _sg_pipeline_t;
typedef struct _sg_view_s {
    _sg_slot_t slot;
    _sg_view_common_t cmn;
    struct {
        int tex_view[SG_NUM_INFLIGHT_FRAMES];
    } mtl;
} _sg_mtl_view_t;
typedef _sg_mtl_view_t _sg_view_t;
// resource binding state cache
//
// NOTE: reserved buffer bindslot ranges:
// - 0..<=7: uniform buffer bindings
// - 8..<=22: storage buffer bindings
// - 23..<=30: vertex buffer bindings
//
#define _SG_MTL_MAX_STAGE_BUFFER_BINDINGS (31) // see: https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf
#define _SG_MTL_MAX_STAGE_UB_BINDINGS (_SG_MAX_UNIFORMBLOCK_BINDINGS_PER_STAGE)
#define _SG_MTL_MAX_STAGE_UB_SBUF_BINDINGS (_SG_MTL_MAX_STAGE_BUFFER_BINDINGS - SG_MAX_VERTEXBUFFER_BINDSLOTS)
#define _SG_MTL_MAX_STAGE_TEXTURE_BINDINGS (SG_MAX_VIEW_BINDSLOTS)
#define _SG_MTL_MAX_STAGE_SAMPLER_BINDINGS (SG_MAX_SAMPLER_BINDSLOTS)
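// NOTE (sketch, derived from the reserved ranges above): a sokol-gfx vertex
// buffer bind slot would map into the top of the Metal buffer slot range:
//
//   int mtl_vb_slot = _SG_MTL_MAX_STAGE_UB_SBUF_BINDINGS + vb_slot; // 23 + vb_slot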
typedef struct {
    _sg_sref_t sref;
    int active_slot;
    int offset;
} _sg_mtl_cache_buf_t;
typedef struct {
    _sg_sref_t sref;
    int active_slot;
} _sg_mtl_cache_tex_t;
typedef enum {
    _SG_MTL_CACHE_CMP_EQUAL = 0,
    _SG_MTL_CACHE_CMP_SREF = (1<<1),
    _SG_MTL_CACHE_CMP_OFFSET = (1<<2),
    _SG_MTL_CACHE_CMP_ACTIVESLOT = (1<<3),
} _sg_mtl_cache_cmp_result_t;
typedef struct {
    _sg_sref_t cur_pip;
    _sg_buffer_ref_t cur_ibuf;
    int cur_ibuf_offset;
    _sg_mtl_cache_buf_t cur_vsbufs[_SG_MTL_MAX_STAGE_BUFFER_BINDINGS];
    _sg_mtl_cache_buf_t cur_fsbufs[_SG_MTL_MAX_STAGE_BUFFER_BINDINGS];
    _sg_mtl_cache_buf_t cur_csbufs[_SG_MTL_MAX_STAGE_BUFFER_BINDINGS];
    _sg_mtl_cache_tex_t cur_vstexs[_SG_MTL_MAX_STAGE_TEXTURE_BINDINGS];
    _sg_mtl_cache_tex_t cur_fstexs[_SG_MTL_MAX_STAGE_TEXTURE_BINDINGS];
    _sg_mtl_cache_tex_t cur_cstexs[_SG_MTL_MAX_STAGE_TEXTURE_BINDINGS];
    _sg_sref_t cur_vssmps[_SG_MTL_MAX_STAGE_SAMPLER_BINDINGS];
    _sg_sref_t cur_fssmps[_SG_MTL_MAX_STAGE_SAMPLER_BINDINGS];
    _sg_sref_t cur_cssmps[_SG_MTL_MAX_STAGE_SAMPLER_BINDINGS];
} _sg_mtl_cache_t;
typedef struct {
    bool valid;
    bool use_shared_storage_mode;
    uint32_t cur_frame_rotate_index;
    int ub_size;
    int cur_ub_offset;
    uint8_t* cur_ub_base_ptr;
    _sg_mtl_cache_t cache;
    _sg_mtl_idpool_t idpool;
    dispatch_semaphore_t sem;
    id<MTLDevice> device;
    id<MTLCommandQueue> cmd_queue;
    id<MTLCommandBuffer> cmd_buffer;
    id<MTLRenderCommandEncoder> render_cmd_encoder;
    id<MTLComputeCommandEncoder> compute_cmd_encoder;
    id<CAMetalDrawable> cur_drawable;
    id<MTLBuffer> uniform_buffers[SG_NUM_INFLIGHT_FRAMES];
} _sg_mtl_backend_t;
#elif defined(SOKOL_WGPU)
#define _SG_WGPU_ROWPITCH_ALIGN (256)
#define _SG_WGPU_MAX_UNIFORM_UPDATE_SIZE (1<<16) // also see WGPULimits.maxUniformBufferBindingSize
#define _SG_WGPU_MAX_BINDGROUPS (2) // 0: uniforms, 1: images, samplers, storage buffers, storage images
#define _SG_WGPU_UB_BINDGROUP_INDEX (0)
#define _SG_WGPU_VIEW_SMP_BINDGROUP_INDEX (1)
#define _SG_WGPU_MAX_UB_BINDGROUP_ENTRIES (SG_MAX_UNIFORMBLOCK_BINDSLOTS)
#define _SG_WGPU_MAX_UB_BINDGROUP_WGSL_SLOTS (2 * SG_MAX_UNIFORMBLOCK_BINDSLOTS)
#define _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES (SG_MAX_VIEW_BINDSLOTS + SG_MAX_SAMPLER_BINDSLOTS)
#define _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_WGSL_SLOTS (128)
typedef struct _sg_buffer_s {
    _sg_slot_t slot;
    _sg_buffer_common_t cmn;
    struct {
        WGPUBuffer buf;
    } wgpu;
} _sg_wgpu_buffer_t;
typedef _sg_wgpu_buffer_t _sg_buffer_t;
typedef struct _sg_image_s {
    _sg_slot_t slot;
    _sg_image_common_t cmn;
    struct {
        WGPUTexture tex;
    } wgpu;
} _sg_wgpu_image_t;
typedef _sg_wgpu_image_t _sg_image_t;
typedef struct _sg_sampler_s {
    _sg_slot_t slot;
    _sg_sampler_common_t cmn;
    struct {
        WGPUSampler smp;
    } wgpu;
} _sg_wgpu_sampler_t;
typedef _sg_wgpu_sampler_t _sg_sampler_t;
typedef struct {
    WGPUShaderModule module;
    _sg_str_t entry;
} _sg_wgpu_shader_func_t;
typedef struct _sg_shader_s {
    _sg_slot_t slot;
    _sg_shader_common_t cmn;
    struct {
        _sg_wgpu_shader_func_t vertex_func;
        _sg_wgpu_shader_func_t fragment_func;
        _sg_wgpu_shader_func_t compute_func;
        WGPUBindGroupLayout bgl_ub;
        WGPUBindGroup bg_ub;
        WGPUBindGroupLayout bgl_view_smp;
        // a mapping of sokol-gfx bind slots to setBindGroup dynamic-offset-array indices
        uint8_t ub_num_dynoffsets;
        uint8_t ub_dynoffsets[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        // indexed by sokol-gfx bind slot:
        uint8_t ub_grp0_bnd_n[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        uint8_t view_grp1_bnd_n[SG_MAX_VIEW_BINDSLOTS];
        uint8_t smp_grp1_bnd_n[SG_MAX_SAMPLER_BINDSLOTS];
    } wgpu;
} _sg_wgpu_shader_t;
typedef _sg_wgpu_shader_t _sg_shader_t;
typedef struct _sg_pipeline_s {
    _sg_slot_t slot;
    _sg_pipeline_common_t cmn;
    struct {
        WGPURenderPipeline rpip;
        WGPUComputePipeline cpip;
        WGPUColor blend_color;
    } wgpu;
} _sg_wgpu_pipeline_t;
typedef _sg_wgpu_pipeline_t _sg_pipeline_t;
typedef struct _sg_view_s {
    _sg_slot_t slot;
    _sg_view_common_t cmn;
    struct {
        WGPUTextureView view;
    } wgpu;
} _sg_wgpu_view_t;
typedef _sg_wgpu_view_t _sg_view_t;
// a pool of per-frame uniform buffers
typedef struct {
    uint32_t num_bytes;
    uint32_t offset; // current offset into buf
    uint8_t* staging; // intermediate buffer for uniform data updates
    WGPUBuffer buf; // the GPU-side uniform buffer
    bool dirty;
    uint32_t bind_offsets[SG_MAX_UNIFORMBLOCK_BINDSLOTS]; // NOTE: index is sokol-gfx ub slot index!
} _sg_wgpu_uniform_system_t;
typedef struct {
    uint32_t id;
} _sg_wgpu_bindgroup_handle_t;
typedef enum {
    _SG_WGPU_BINDGROUPSCACHEITEMTYPE_NONE = 0,
    _SG_WGPU_BINDGROUPSCACHEITEMTYPE_VIEW = 1,
    _SG_WGPU_BINDGROUPSCACHEITEMTYPE_SAMPLER = 2,
    _SG_WGPU_BINDGROUPSCACHEITEMTYPE_PIPELINE = 3,
} _sg_wgpu_bindgroups_cache_item_type_t;
#define _SG_WGPU_BINDGROUPSCACHEKEY_NUM_ITEMS (1 + _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES)
typedef struct {
    uint64_t hash;
    // the format of cache key items is BBTCCCCCIIIIIIII
    // where
    // - BB: 8 bits WGPU binding
    // - T: 2 bits _sg_wgpu_bindgroups_cache_item_type_t
    // - CCCCC: 22 bits slot.uninit_count
    // - IIIIIIII: 32 bits slot.id
    //
    // where the item type is a per-resource-type bit pattern
    uint64_t items[_SG_WGPU_BINDGROUPSCACHEKEY_NUM_ITEMS];
} _sg_wgpu_bindgroups_cache_key_t;
typedef struct {
    uint32_t num; // must be 2^n
    uint32_t index_mask; // mask to turn hash into valid index
    _sg_wgpu_bindgroup_handle_t* items;
} _sg_wgpu_bindgroups_cache_t;
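// NOTE (sketch): since 'num' is a power of two, a bindgroup cache lookup can
// turn the 64-bit key hash into a valid slot index with a simple mask:
//
//   uint32_t index = (uint32_t)(key.hash & cache->index_mask);
//   _sg_wgpu_bindgroup_handle_t item = cache->items[index];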
typedef struct {
    _sg_slot_t slot;
    WGPUBindGroup bindgroup;
    _sg_wgpu_bindgroups_cache_key_t key;
} _sg_wgpu_bindgroup_t;
typedef struct {
    _sg_pool_t pool;
    _sg_wgpu_bindgroup_t* bindgroups;
} _sg_wgpu_bindgroups_pool_t;
typedef struct {
    struct {
        sg_buffer buffer;
        uint64_t offset;
    } vbs[SG_MAX_VERTEXBUFFER_BINDSLOTS];
    struct {
        sg_buffer buffer;
        uint64_t offset;
    } ib;
    _sg_wgpu_bindgroup_handle_t bg;
} _sg_wgpu_bindings_cache_t;
// the WGPU backend state
typedef struct {
    bool valid;
    WGPUDevice dev;
    WGPULimits limits;
    WGPUQueue queue;
    WGPUCommandEncoder cmd_enc;
    WGPURenderPassEncoder rpass_enc;
    WGPUComputePassEncoder cpass_enc;
    _sg_wgpu_uniform_system_t uniform;
    _sg_wgpu_bindings_cache_t bindings_cache;
    _sg_wgpu_bindgroups_cache_t bindgroups_cache;
    _sg_wgpu_bindgroups_pool_t bindgroups_pool;
} _sg_wgpu_backend_t;
  5983. #elif defined(SOKOL_VULKAN)
  5984. #define _SG_VK_MAX_UNIFORM_UPDATE_SIZE (1<<16)
  5985. #define _SG_VK_NUM_DESCRIPTORSETS (2) // 0: uniforms, 1: images, samplers, storage buffers, storage images
  5986. #define _SG_VK_UB_DESCRIPTORSET_INDEX (0)
  5987. #define _SG_VK_VIEW_SMP_DESCRIPTORSET_INDEX (1)
  5988. #define _SG_VK_MAX_UB_DESCRIPTORSET_ENTRIES (SG_MAX_UNIFORMBLOCK_BINDSLOTS)
  5989. #define _SG_VK_MAX_UB_DESCRIPTORSET_SLOTS (2 * SG_MAX_UNIFORMBLOCK_BINDSLOTS)
  5990. #define _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_ENTRIES (SG_MAX_VIEW_BINDSLOTS + SG_MAX_SAMPLER_BINDSLOTS)
  5991. #define _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_SLOTS (128)
  5992. #define _SG_VK_MAX_DESCRIPTOR_DATA_SIZE (256) // FIXME: llvmpipe needs 280 bytes, do we need to care about that?
  5993. typedef enum {
  5994. _SG_VK_MEMTYPE_STORAGE_BUFFER,
  5995. _SG_VK_MEMTYPE_GENERIC_BUFFER,
  5996. _SG_VK_MEMTYPE_IMAGE,
  5997. _SG_VK_MEMTYPE_STAGING_COPY,
  5998. _SG_VK_MEMTYPE_STAGING_STREAM,
  5999. _SG_VK_MEMTYPE_UNIFORMS,
  6000. _SG_VK_MEMTYPE_DESCRIPTORS,
  6001. } _sg_vk_memtype_t;
  6002. typedef void (*_sg_vk_delete_queue_destructor_t)(void* obj);
  6003. typedef struct {
  6004. _sg_vk_delete_queue_destructor_t destructor;
  6005. void* obj;
  6006. } _sg_vk_delete_queue_item_t;
  6007. typedef struct {
  6008. uint32_t index;
  6009. uint32_t num;
  6010. _sg_vk_delete_queue_item_t* items;
  6011. } _sg_vk_delete_queue_t;
  6012. typedef enum {
  6013. _SG_VK_ACCESS_NONE = (0), // initial state for new resources
  6014. _SG_VK_ACCESS_STAGING = (1<<0),
  6015. _SG_VK_ACCESS_VERTEXBUFFER = (1<<1),
  6016. _SG_VK_ACCESS_INDEXBUFFER = (1<<2),
  6017. _SG_VK_ACCESS_STORAGEBUFFER_RO = (1<<3),
  6018. _SG_VK_ACCESS_STORAGEBUFFER_RW = (1<<4),
  6019. _SG_VK_ACCESS_TEXTURE = (1<<5),
  6020. _SG_VK_ACCESS_STORAGEIMAGE = (1<<6),
  6021. _SG_VK_ACCESS_COLOR_ATTACHMENT = (1<<7),
  6022. _SG_VK_ACCESS_RESOLVE_ATTACHMENT = (1<<8),
  6023. _SG_VK_ACCESS_DEPTH_ATTACHMENT = (1<<9),
  6024. _SG_VK_ACCESS_STENCIL_ATTACHMENT = (1<<10),
  6025. _SG_VK_ACCESS_DISCARD = (1<<11), // in combination with attachments
  6026. _SG_VK_ACCESS_PRESENT = (1<<12),
  6027. } _sg_vk_access_bits_t;
  6028. typedef int _sg_vk_access_t;
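// NOTE: the access flags are single bits so that a resource's current access
// can combine several states, as the DISCARD comment above indicates; e.g. a
// color attachment whose previous content may be discarded would be tracked
// as (illustrative combination only):
//
//   _sg_vk_access_t access = _SG_VK_ACCESS_COLOR_ATTACHMENT | _SG_VK_ACCESS_DISCARD;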
typedef struct _sg_buffer_s {
    _sg_slot_t slot;
    _sg_buffer_common_t cmn;
    struct {
        VkBuffer buf;
        VkDeviceMemory mem;
        VkDeviceAddress dev_addr; // only valid for storage buffers
        _sg_vk_access_t cur_access;
    } vk;
} _sg_vk_buffer_t;
typedef _sg_vk_buffer_t _sg_buffer_t;
typedef struct _sg_image_s {
    _sg_slot_t slot;
    _sg_image_common_t cmn;
    struct {
        VkImage img;
        VkDeviceMemory mem;
        _sg_vk_access_t cur_access;
    } vk;
} _sg_vk_image_t;
typedef _sg_vk_image_t _sg_image_t;
typedef struct _sg_sampler_s {
    _sg_slot_t slot;
    _sg_sampler_common_t cmn;
    struct {
        VkSampler smp;
        size_t descriptor_size;
        uint8_t descriptor_data[_SG_VK_MAX_DESCRIPTOR_DATA_SIZE];
    } vk;
} _sg_vk_sampler_t;
typedef _sg_vk_sampler_t _sg_sampler_t;
typedef struct {
    VkShaderModule module;
    _sg_str_t entry;
} _sg_vk_shader_func_t;
typedef struct _sg_shader_s {
    _sg_slot_t slot;
    _sg_shader_common_t cmn;
    struct {
        _sg_vk_shader_func_t vertex_func;
        _sg_vk_shader_func_t fragment_func;
        _sg_vk_shader_func_t compute_func;
        VkDescriptorSetLayout ub_dsl;
        VkDeviceSize ub_dset_size;
        VkDescriptorSetLayout view_smp_dsl;
        VkDeviceSize view_smp_dset_size;
        VkPipelineLayout pip_layout;
        // indexed by sokol-gfx bind-slot
        uint8_t ub_set0_bnd_n[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        uint8_t view_set1_bnd_n[SG_MAX_VIEW_BINDSLOTS];
        uint8_t smp_set1_bnd_n[SG_MAX_SAMPLER_BINDSLOTS];
        // relative descriptor offsets to start of descriptor set in descriptor buffer
        uint16_t ub_dset_offsets[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
        uint16_t view_dset_offsets[SG_MAX_VIEW_BINDSLOTS];
        uint16_t smp_dset_offsets[SG_MAX_SAMPLER_BINDSLOTS];
    } vk;
} _sg_vk_shader_t;
typedef _sg_vk_shader_t _sg_shader_t;
typedef struct _sg_pipeline_s {
    _sg_slot_t slot;
    _sg_pipeline_common_t cmn;
    struct {
        VkPipeline pip;
    } vk;
} _sg_vk_pipeline_t;
typedef _sg_vk_pipeline_t _sg_pipeline_t;
typedef struct _sg_view_s {
    _sg_slot_t slot;
    _sg_view_common_t cmn;
    struct {
        VkImageView img_view;
        size_t descriptor_size;
        uint8_t descriptor_data[_SG_VK_MAX_DESCRIPTOR_DATA_SIZE];
    } vk;
} _sg_vk_view_t;
typedef _sg_vk_view_t _sg_view_t;
// a double-buffered cpu-write / gpu-read buffer
#define _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT (0xFFFFFFFF)
typedef struct {
    uint32_t size;      // buffer size
    uint32_t align;     // required buffer offset alignment
    uint32_t offset;    // current offset into buffer
    VkBuffer cur_buf;   // currently mapped buffer
    void* cur_mem_ptr;  // current pointer into currently mapped buffer
    VkDeviceAddress cur_dev_addr; // current buffer device address (only valid for some buffer types)
    bool overflown;     // true when in overflown state
    struct {
        VkBuffer buf;
        VkDeviceMemory mem;
        VkDeviceAddress dev_addr; // only valid for some buffer types!
        void* mem_ptr;
    } slots[SG_NUM_INFLIGHT_FRAMES];
} _sg_vk_shared_buffer_t;
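// NOTE: a minimal sketch of the double-buffering idea: the CPU writes into the
// slot for the current in-flight frame while the GPU may still read the other
// slot, and allocations that don't fit return the overflow sentinel. This is
// illustrative only and not the actual allocation function:
//
//   uint32_t offset = _sg_align_u32(sbuf->offset, sbuf->align);
//   if ((offset + size) > sbuf->size) {
//       sbuf->overflown = true;
//       return _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT;
//   }
//   memcpy((uint8_t*)sbuf->cur_mem_ptr + offset, data, size);
//   sbuf->offset = offset + size;
//   return offset;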
typedef struct {
    VkDescriptorAddressInfoEXT addr_info;
    VkDescriptorGetInfoEXT get_info;
} _sg_vk_uniform_bindinfo_t;
typedef struct {
    bool valid;
    VkPhysicalDevice phys_dev;
    VkDevice dev;
    VkQueue queue;
    uint32_t queue_family_index;
    sg_vulkan_swapchain swapchain;
    VkSemaphore present_complete_sem;
    VkSemaphore render_finished_sem;
    // extension function pointers
    struct {
        PFN_vkGetDescriptorSetLayoutSizeEXT get_descriptor_set_layout_size;
        PFN_vkGetDescriptorSetLayoutBindingOffsetEXT get_descriptor_set_layout_binding_offset;
        PFN_vkGetDescriptorEXT get_descriptor;
        PFN_vkCmdBindDescriptorBuffersEXT cmd_bind_descriptor_buffers;
        PFN_vkCmdSetDescriptorBufferOffsetsEXT cmd_set_descriptor_buffer_offsets;
    } ext;
    uint32_t frame_slot;
    struct {
        VkCommandPool cmd_pool;
        VkCommandBuffer cmd_buf;
        VkCommandBuffer stream_cmd_buf;
        struct {
            VkFence fence;
            VkCommandBuffer command_buffer;
            VkCommandBuffer stream_command_buffer;
            _sg_vk_delete_queue_t delete_queue;
        } slot[SG_NUM_INFLIGHT_FRAMES];
    } frame;
    // staging system
    struct {
        // staging system for immutable and dynamic resources, generally causes a stall
        struct {
            VkCommandPool cmd_pool;
            VkCommandBuffer cmd_buf;
            uint32_t size;
            VkBuffer buf;
            VkDeviceMemory mem;
        } copy;
        // staging buffer for per-frame streaming updates
        _sg_vk_shared_buffer_t stream;
    } stage;
    // uniform update system
    bool uniforms_dirty;
    _sg_vk_shared_buffer_t uniform;
    _sg_vk_uniform_bindinfo_t uniform_bindinfos[SG_MAX_UNIFORMBLOCK_BINDSLOTS];
    // resource binding system (using descriptor buffers)
    _sg_vk_shared_buffer_t bind;
    // hazard tracking system for buffers and images
    struct {
        _sg_track_t buffers;
        _sg_track_t images;
    } track;
    // device properties and features (initialized at startup)
    VkPhysicalDeviceProperties2 dev_props;
    VkPhysicalDeviceDescriptorBufferPropertiesEXT descriptor_buffer_props;
    VkPhysicalDeviceFeatures2 dev_features;
} _sg_vk_backend_t;
#endif // SOKOL_VULKAN
// this *MUST* remain 0
#define _SG_INVALID_SLOT_INDEX (0)
typedef struct _sg_pools_s {
    _sg_pool_t buffer_pool;
    _sg_pool_t image_pool;
    _sg_pool_t sampler_pool;
    _sg_pool_t shader_pool;
    _sg_pool_t pipeline_pool;
    _sg_pool_t view_pool;
    _sg_buffer_t* buffers;
    _sg_image_t* images;
    _sg_sampler_t* samplers;
    _sg_shader_t* shaders;
    _sg_pipeline_t* pipelines;
    _sg_view_t* views;
} _sg_pools_t;
typedef struct {
    int num;    // number of allocated commit listener items
    int upper;  // the current upper index (no valid items past this point)
    sg_commit_listener* items;
} _sg_commit_listeners_t;
// resolved pass attachments struct
typedef struct {
    bool empty;
    int num_color_views;
    _sg_view_t* color_views[SG_MAX_COLOR_ATTACHMENTS];
    _sg_view_t* resolve_views[SG_MAX_COLOR_ATTACHMENTS];
    _sg_view_t* ds_view;
} _sg_attachments_ptrs_t;
// resolved resource bindings struct
typedef struct {
    _sg_pipeline_t* pip;
    int vb_offsets[SG_MAX_VERTEXBUFFER_BINDSLOTS];
    int ib_offset;
    _sg_buffer_t* vbs[SG_MAX_VERTEXBUFFER_BINDSLOTS];
    _sg_buffer_t* ib;
    _sg_view_t* views[SG_MAX_VIEW_BINDSLOTS];
    _sg_sampler_t* smps[SG_MAX_SAMPLER_BINDSLOTS];
} _sg_bindings_ptrs_t;
typedef struct {
    bool sample;
    bool filter;
    bool render;
    bool blend;
    bool msaa;
    bool depth;
    bool read;
    bool write;
} _sg_pixelformat_info_t;
typedef struct {
    bool valid;
    sg_desc desc;   // original desc with default values patched in
    uint32_t frame_index;
    struct {
        bool valid;
        bool in_pass;
        bool is_compute;
        _sg_dimi_t dim;
        sg_attachments atts;
        sg_pass_action action;
        struct {
            sg_pixel_format color_fmt;
            sg_pixel_format depth_fmt;
            int sample_count;
        } swapchain;
    } cur_pass;
    _sg_pipeline_ref_t cur_pip;
    bool next_draw_valid;
    bool use_indexed_draw;
    bool use_instanced_draw;
    uint32_t required_bindings_and_uniforms;    // used to check that bindings and uniforms are applied after applying pipeline
    uint32_t applied_bindings_and_uniforms;     // bits 0..7: uniform blocks, bit 8: bindings
    #if defined(SOKOL_DEBUG)
    sg_log_item validate_error;
    #endif
    _sg_pools_t pools;
    sg_backend backend;
    sg_features features;
    sg_limits limits;
    _sg_pixelformat_info_t formats[_SG_PIXELFORMAT_NUM];
    bool stats_enabled;
    sg_stats stats;
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_backend_t gl;
    #elif defined(SOKOL_METAL)
    _sg_mtl_backend_t mtl;
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_backend_t d3d11;
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_backend_t wgpu;
    #elif defined(SOKOL_VULKAN)
    _sg_vk_backend_t vk;
    #endif
    #if defined(SOKOL_TRACE_HOOKS)
    sg_trace_hooks hooks;
    #endif
    _sg_commit_listeners_t commit_listeners;
} _sg_state_t;
static _sg_state_t _sg;
// ██       ██████   ██████   ██████  ██ ███    ██  ██████
// ██      ██    ██ ██       ██       ██ ████   ██ ██
// ██      ██    ██ ██   ███ ██   ███ ██ ██ ██  ██ ██   ███
// ██      ██    ██ ██    ██ ██    ██ ██ ██  ██ ██ ██    ██
// ███████  ██████   ██████   ██████  ██ ██   ████  ██████
//
// >>logging
#if defined(SOKOL_DEBUG)
#define _SG_LOGITEM_XMACRO(item,msg) #item ": " msg,
static const char* _sg_log_messages[] = {
    _SG_LOG_ITEMS
};
#undef _SG_LOGITEM_XMACRO
#endif // SOKOL_DEBUG
#define _SG_PANIC(code) _sg_log(SG_LOGITEM_ ##code, 0, 0, __LINE__)
#define _SG_ERROR(code) _sg_log(SG_LOGITEM_ ##code, 1, 0, __LINE__)
#define _SG_WARN(code) _sg_log(SG_LOGITEM_ ##code, 2, 0, __LINE__)
#define _SG_INFO(code) _sg_log(SG_LOGITEM_ ##code, 3, 0, __LINE__)
#define _SG_LOGMSG(code,msg) _sg_log(SG_LOGITEM_ ##code, 3, msg, __LINE__)
#define _SG_VALIDATE(cond,code) if (!(cond)){ _sg.validate_error = SG_LOGITEM_ ##code; _sg_log(SG_LOGITEM_ ##code, 1, 0, __LINE__); }
static void _sg_log(sg_log_item log_item, uint32_t log_level, const char* msg, uint32_t line_nr) {
    if (_sg.desc.logger.func) {
        const char* filename = 0;
        #if defined(SOKOL_DEBUG)
        filename = __FILE__;
        if (0 == msg) {
            msg = _sg_log_messages[log_item];
        }
        #endif
        _sg.desc.logger.func("sg", log_level, (uint32_t)log_item, msg, line_nr, filename, _sg.desc.logger.user_data);
    } else {
        // for log level PANIC it would be 'undefined behaviour' to continue
        if (log_level == 0) {
            abort();
        }
    }
}
// ███    ███ ███████ ███    ███  ██████  ██████  ██    ██
// ████  ████ ██      ████  ████ ██    ██ ██   ██  ██  ██
// ██ ████ ██ █████   ██ ████ ██ ██    ██ ██████    ████
// ██  ██  ██ ██      ██  ██  ██ ██    ██ ██   ██    ██
// ██      ██ ███████ ██      ██  ██████  ██   ██    ██
//
// >>memory
_SOKOL_PRIVATE int _sg_roundup(int val, int round_to) {
    return (val+(round_to-1)) & ~(round_to-1);
}
_SOKOL_PRIVATE uint32_t _sg_roundup_u32(uint32_t val, uint32_t round_to) {
    return (val+(round_to-1)) & ~(round_to-1);
}
_SOKOL_PRIVATE uint64_t _sg_roundup_u64(uint64_t val, uint64_t round_to) {
    return (val+(round_to-1)) & ~(round_to-1);
}
_SOKOL_PRIVATE bool _sg_multiple_u64(uint64_t val, uint64_t of) {
    return (val & (of-1)) == 0;
}
// a helper macro to clear a struct with potentially ARC'ed ObjC references
#if defined(SOKOL_METAL)
#if defined(__cplusplus)
#define _SG_CLEAR_ARC_STRUCT(type, item) { item = type(); }
#else
#define _SG_CLEAR_ARC_STRUCT(type, item) { item = (type) { 0 }; }
#endif
#else
#define _SG_CLEAR_ARC_STRUCT(type, item) { _sg_clear(&item, sizeof(item)); }
#endif
_SOKOL_PRIVATE void _sg_clear(void* ptr, size_t size) {
    SOKOL_ASSERT(ptr && (size > 0));
    memset(ptr, 0, size);
}
_SOKOL_PRIVATE void* _sg_malloc(size_t size) {
    SOKOL_ASSERT(size > 0);
    void* ptr;
    if (_sg.desc.allocator.alloc_fn) {
        ptr = _sg.desc.allocator.alloc_fn(size, _sg.desc.allocator.user_data);
    } else {
        ptr = malloc(size);
    }
    if (0 == ptr) {
        _SG_PANIC(MALLOC_FAILED);
    }
    return ptr;
}
_SOKOL_PRIVATE void* _sg_malloc_clear(size_t size) {
    void* ptr = _sg_malloc(size);
    _sg_clear(ptr, size);
    return ptr;
}
_SOKOL_PRIVATE void _sg_free(void* ptr) {
    if (_sg.desc.allocator.free_fn) {
        _sg.desc.allocator.free_fn(ptr, _sg.desc.allocator.user_data);
    } else {
        free(ptr);
    }
}
_SOKOL_PRIVATE bool _sg_strempty(const _sg_str_t* str) {
    return 0 == str->buf[0];
}
_SOKOL_PRIVATE const char* _sg_strptr(const _sg_str_t* str) {
    return &str->buf[0];
}
_SOKOL_PRIVATE void _sg_strcpy(_sg_str_t* dst, const char* src) {
    SOKOL_ASSERT(dst);
    if (src) {
        #if defined(_MSC_VER)
        strncpy_s(dst->buf, _SG_STRING_SIZE, src, (_SG_STRING_SIZE-1));
        #else
        strncpy(dst->buf, src, _SG_STRING_SIZE);
        #endif
        dst->buf[_SG_STRING_SIZE-1] = 0;
    } else {
        _sg_clear(dst->buf, _SG_STRING_SIZE);
    }
}
// ██████   ██████   ██████  ██
// ██   ██ ██    ██ ██    ██ ██
// ██████  ██    ██ ██    ██ ██
// ██      ██    ██ ██    ██ ██
// ██       ██████   ██████  ███████
//
// >>pool
_SOKOL_PRIVATE void _sg_pool_init(_sg_pool_t* pool, int num) {
    SOKOL_ASSERT(pool && (num >= 1));
    // slot 0 is reserved for the 'invalid id', so bump the pool size by 1
    pool->size = num + 1;
    pool->queue_top = 0;
    // generation counters indexable by pool slot index, slot 0 is reserved
    size_t gen_ctrs_size = sizeof(uint32_t) * (size_t)pool->size;
    pool->gen_ctrs = (uint32_t*)_sg_malloc_clear(gen_ctrs_size);
    // it's not a bug to only reserve 'num' here
    pool->free_queue = (int*) _sg_malloc_clear(sizeof(int) * (size_t)num);
    // never allocate the zero-th pool item since the invalid id is 0
    for (int i = pool->size-1; i >= 1; i--) {
        pool->free_queue[pool->queue_top++] = i;
    }
}
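// NOTE: worked example for the free-queue setup above: for num = 3 the pool
// size becomes 4 (slot 0 is reserved for the invalid id), and the loop fills
// the free queue with the indices [3, 2, 1], so the first allocation pops
// slot index 1.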
_SOKOL_PRIVATE void _sg_pool_discard(_sg_pool_t* pool) {
    SOKOL_ASSERT(pool);
    SOKOL_ASSERT(pool->free_queue);
    _sg_free(pool->free_queue);
    pool->free_queue = 0;
    SOKOL_ASSERT(pool->gen_ctrs);
    _sg_free(pool->gen_ctrs);
    pool->gen_ctrs = 0;
    pool->size = 0;
    pool->queue_top = 0;
}
_SOKOL_PRIVATE int _sg_pool_alloc_index(_sg_pool_t* pool) {
    SOKOL_ASSERT(pool);
    SOKOL_ASSERT(pool->free_queue);
    if (pool->queue_top > 0) {
        int slot_index = pool->free_queue[--pool->queue_top];
        SOKOL_ASSERT((slot_index > 0) && (slot_index < pool->size));
        return slot_index;
    } else {
        // pool exhausted
        return _SG_INVALID_SLOT_INDEX;
    }
}
_SOKOL_PRIVATE void _sg_pool_free_index(_sg_pool_t* pool, int slot_index) {
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < pool->size));
    SOKOL_ASSERT(pool);
    SOKOL_ASSERT(pool->free_queue);
    SOKOL_ASSERT(pool->queue_top < pool->size);
    #ifdef SOKOL_DEBUG
    // debug check against double-free
    for (int i = 0; i < pool->queue_top; i++) {
        SOKOL_ASSERT(pool->free_queue[i] != slot_index);
    }
    #endif
    pool->free_queue[pool->queue_top++] = slot_index;
    SOKOL_ASSERT(pool->queue_top <= (pool->size-1));
}
_SOKOL_PRIVATE void _sg_slot_reset(_sg_slot_t* slot) {
    SOKOL_ASSERT(slot);
    _sg_clear(slot, sizeof(_sg_slot_t));
}
_SOKOL_PRIVATE void _sg_reset_buffer_to_alloc_state(_sg_buffer_t* buf) {
    SOKOL_ASSERT(buf);
    _sg_slot_t slot = buf->slot;
    _sg_clear(buf, sizeof(*buf));
    buf->slot = slot;
    buf->slot.uninit_count += 1;
    buf->slot.state = SG_RESOURCESTATE_ALLOC;
}
_SOKOL_PRIVATE void _sg_reset_image_to_alloc_state(_sg_image_t* img) {
    SOKOL_ASSERT(img);
    _sg_slot_t slot = img->slot;
    _sg_clear(img, sizeof(*img));
    img->slot = slot;
    img->slot.uninit_count += 1;
    img->slot.state = SG_RESOURCESTATE_ALLOC;
}
_SOKOL_PRIVATE void _sg_reset_sampler_to_alloc_state(_sg_sampler_t* smp) {
    SOKOL_ASSERT(smp);
    _sg_slot_t slot = smp->slot;
    _sg_clear(smp, sizeof(*smp));
    smp->slot = slot;
    smp->slot.uninit_count += 1;
    smp->slot.state = SG_RESOURCESTATE_ALLOC;
}
_SOKOL_PRIVATE void _sg_reset_shader_to_alloc_state(_sg_shader_t* shd) {
    SOKOL_ASSERT(shd);
    _sg_slot_t slot = shd->slot;
    _sg_clear(shd, sizeof(*shd));
    shd->slot = slot;
    shd->slot.uninit_count += 1;
    shd->slot.state = SG_RESOURCESTATE_ALLOC;
}
_SOKOL_PRIVATE void _sg_reset_pipeline_to_alloc_state(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    _sg_slot_t slot = pip->slot;
    _sg_clear(pip, sizeof(*pip));
    pip->slot = slot;
    pip->slot.uninit_count += 1;
    pip->slot.state = SG_RESOURCESTATE_ALLOC;
}
_SOKOL_PRIVATE void _sg_reset_view_to_alloc_state(_sg_view_t* view) {
    SOKOL_ASSERT(view);
    _sg_slot_t slot = view->slot;
    _sg_clear(view, sizeof(*view));
    view->slot = slot;
    view->slot.uninit_count += 1;
    view->slot.state = SG_RESOURCESTATE_ALLOC;
}
_SOKOL_PRIVATE void _sg_setup_pools(_sg_pools_t* p, const sg_desc* desc) {
    SOKOL_ASSERT(p);
    SOKOL_ASSERT(desc);
    // note: the pools here will have an additional item, since slot 0 is reserved
    SOKOL_ASSERT((desc->buffer_pool_size > 0) && (desc->buffer_pool_size < _SG_MAX_POOL_SIZE));
    _sg_pool_init(&p->buffer_pool, desc->buffer_pool_size);
    size_t buffer_pool_byte_size = sizeof(_sg_buffer_t) * (size_t)p->buffer_pool.size;
    p->buffers = (_sg_buffer_t*) _sg_malloc_clear(buffer_pool_byte_size);
    SOKOL_ASSERT((desc->image_pool_size > 0) && (desc->image_pool_size < _SG_MAX_POOL_SIZE));
    _sg_pool_init(&p->image_pool, desc->image_pool_size);
    size_t image_pool_byte_size = sizeof(_sg_image_t) * (size_t)p->image_pool.size;
    p->images = (_sg_image_t*) _sg_malloc_clear(image_pool_byte_size);
    SOKOL_ASSERT((desc->sampler_pool_size > 0) && (desc->sampler_pool_size < _SG_MAX_POOL_SIZE));
    _sg_pool_init(&p->sampler_pool, desc->sampler_pool_size);
    size_t sampler_pool_byte_size = sizeof(_sg_sampler_t) * (size_t)p->sampler_pool.size;
    p->samplers = (_sg_sampler_t*) _sg_malloc_clear(sampler_pool_byte_size);
    SOKOL_ASSERT((desc->shader_pool_size > 0) && (desc->shader_pool_size < _SG_MAX_POOL_SIZE));
    _sg_pool_init(&p->shader_pool, desc->shader_pool_size);
    size_t shader_pool_byte_size = sizeof(_sg_shader_t) * (size_t)p->shader_pool.size;
    p->shaders = (_sg_shader_t*) _sg_malloc_clear(shader_pool_byte_size);
    SOKOL_ASSERT((desc->pipeline_pool_size > 0) && (desc->pipeline_pool_size < _SG_MAX_POOL_SIZE));
    _sg_pool_init(&p->pipeline_pool, desc->pipeline_pool_size);
    size_t pipeline_pool_byte_size = sizeof(_sg_pipeline_t) * (size_t)p->pipeline_pool.size;
    p->pipelines = (_sg_pipeline_t*) _sg_malloc_clear(pipeline_pool_byte_size);
    SOKOL_ASSERT((desc->view_pool_size > 0) && (desc->view_pool_size < _SG_MAX_POOL_SIZE));
    _sg_pool_init(&p->view_pool, desc->view_pool_size);
    size_t view_pool_byte_size = sizeof(_sg_view_t) * (size_t)p->view_pool.size;
    p->views = (_sg_view_t*) _sg_malloc_clear(view_pool_byte_size);
}
_SOKOL_PRIVATE void _sg_discard_pools(_sg_pools_t* p) {
    SOKOL_ASSERT(p);
    _sg_free(p->views); p->views = 0;
    _sg_free(p->pipelines); p->pipelines = 0;
    _sg_free(p->shaders); p->shaders = 0;
    _sg_free(p->samplers); p->samplers = 0;
    _sg_free(p->images); p->images = 0;
    _sg_free(p->buffers); p->buffers = 0;
    _sg_pool_discard(&p->view_pool);
    _sg_pool_discard(&p->pipeline_pool);
    _sg_pool_discard(&p->shader_pool);
    _sg_pool_discard(&p->sampler_pool);
    _sg_pool_discard(&p->image_pool);
    _sg_pool_discard(&p->buffer_pool);
}
/* allocate the slot at slot_index:
    - bump the slot's generation counter
    - create a resource id from the generation counter and slot index
    - set the slot's id to this id
    - set the slot's state to ALLOC
    - return the resource id
*/
_SOKOL_PRIVATE uint32_t _sg_slot_alloc(_sg_pool_t* pool, _sg_slot_t* slot, int slot_index) {
    /* FIXME: add handling for an overflowing generation counter,
       for now, just overflow (another option is to disable
       the slot)
    */
    SOKOL_ASSERT(pool && pool->gen_ctrs);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < pool->size));
    SOKOL_ASSERT(slot->id == SG_INVALID_ID);
    SOKOL_ASSERT(slot->state == SG_RESOURCESTATE_INITIAL);
    uint32_t ctr = ++pool->gen_ctrs[slot_index];
    slot->id = (ctr<<_SG_SLOT_SHIFT)|(slot_index & _SG_SLOT_MASK);
    slot->state = SG_RESOURCESTATE_ALLOC;
    return slot->id;
}
// extract slot index from id
_SOKOL_PRIVATE int _sg_slot_index(uint32_t id) {
    int slot_index = (int) (id & _SG_SLOT_MASK);
    SOKOL_ASSERT(_SG_INVALID_SLOT_INDEX != slot_index);
    return slot_index;
}
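// NOTE: sketch of how a resource id round-trips through the two functions
// above (using the same _SG_SLOT_SHIFT/_SG_SLOT_MASK constants):
//
//   uint32_t id  = (ctr << _SG_SLOT_SHIFT) | (slot_index & _SG_SLOT_MASK);
//   int index    = (int)(id & _SG_SLOT_MASK); // what _sg_slot_index() returns
//   uint32_t gen = id >> _SG_SLOT_SHIFT;      // the generation counter part
//
// a dangling id with a stale generation counter no longer matches the slot's
// current id, which is exactly what the _sg_lookup_*() functions below check.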
// returns pointer to resource by id without matching id check
_SOKOL_PRIVATE _sg_buffer_t* _sg_buffer_at(uint32_t buf_id) {
    SOKOL_ASSERT(SG_INVALID_ID != buf_id);
    int slot_index = _sg_slot_index(buf_id);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < _sg.pools.buffer_pool.size));
    return &_sg.pools.buffers[slot_index];
}
_SOKOL_PRIVATE _sg_image_t* _sg_image_at(uint32_t img_id) {
    SOKOL_ASSERT(SG_INVALID_ID != img_id);
    int slot_index = _sg_slot_index(img_id);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < _sg.pools.image_pool.size));
    return &_sg.pools.images[slot_index];
}
_SOKOL_PRIVATE _sg_sampler_t* _sg_sampler_at(uint32_t smp_id) {
    SOKOL_ASSERT(SG_INVALID_ID != smp_id);
    int slot_index = _sg_slot_index(smp_id);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < _sg.pools.sampler_pool.size));
    return &_sg.pools.samplers[slot_index];
}
_SOKOL_PRIVATE _sg_shader_t* _sg_shader_at(uint32_t shd_id) {
    SOKOL_ASSERT(SG_INVALID_ID != shd_id);
    int slot_index = _sg_slot_index(shd_id);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < _sg.pools.shader_pool.size));
    return &_sg.pools.shaders[slot_index];
}
_SOKOL_PRIVATE _sg_pipeline_t* _sg_pipeline_at(uint32_t pip_id) {
    SOKOL_ASSERT(SG_INVALID_ID != pip_id);
    int slot_index = _sg_slot_index(pip_id);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < _sg.pools.pipeline_pool.size));
    return &_sg.pools.pipelines[slot_index];
}
_SOKOL_PRIVATE _sg_view_t* _sg_view_at(uint32_t view_id) {
    SOKOL_ASSERT(SG_INVALID_ID != view_id);
    int slot_index = _sg_slot_index(view_id);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < _sg.pools.view_pool.size));
    return &_sg.pools.views[slot_index];
}
// returns pointer to resource with matching id check, may return 0
_SOKOL_PRIVATE _sg_buffer_t* _sg_lookup_buffer(uint32_t buf_id) {
    if (SG_INVALID_ID != buf_id) {
        _sg_buffer_t* buf = _sg_buffer_at(buf_id);
        if (buf->slot.id == buf_id) {
            return buf;
        }
    }
    return 0;
}
_SOKOL_PRIVATE _sg_image_t* _sg_lookup_image(uint32_t img_id) {
    if (SG_INVALID_ID != img_id) {
        _sg_image_t* img = _sg_image_at(img_id);
        if (img->slot.id == img_id) {
            return img;
        }
    }
    return 0;
}
_SOKOL_PRIVATE _sg_sampler_t* _sg_lookup_sampler(uint32_t smp_id) {
    if (SG_INVALID_ID != smp_id) {
        _sg_sampler_t* smp = _sg_sampler_at(smp_id);
        if (smp->slot.id == smp_id) {
            return smp;
        }
    }
    return 0;
}
_SOKOL_PRIVATE _sg_shader_t* _sg_lookup_shader(uint32_t shd_id) {
    if (SG_INVALID_ID != shd_id) {
        _sg_shader_t* shd = _sg_shader_at(shd_id);
        if (shd->slot.id == shd_id) {
            return shd;
        }
    }
    return 0;
}
_SOKOL_PRIVATE _sg_pipeline_t* _sg_lookup_pipeline(uint32_t pip_id) {
    if (SG_INVALID_ID != pip_id) {
        _sg_pipeline_t* pip = _sg_pipeline_at(pip_id);
        if (pip->slot.id == pip_id) {
            return pip;
        }
    }
    return 0;
}
_SOKOL_PRIVATE _sg_view_t* _sg_lookup_view(uint32_t view_id) {
    if (SG_INVALID_ID != view_id) {
        _sg_view_t* view = _sg_view_at(view_id);
        if (view->slot.id == view_id) {
            return view;
        }
    }
    return 0;
}
// ████████ ██████   █████   ██████ ██   ██
//    ██    ██   ██ ██   ██ ██      ██  ██
//    ██    ██████  ███████ ██      █████
//    ██    ██   ██ ██   ██ ██      ██  ██
//    ██    ██   ██ ██   ██  ██████ ██   ██
//
// >>track
_SOKOL_PRIVATE void _sg_track_init(_sg_track_t* track, int num_slots) {
    SOKOL_ASSERT(track && (num_slots > 0));
    _sg_clear(track, sizeof(_sg_track_t));
    track->num_slots = num_slots;
    track->slots = (uint32_t*)_sg_malloc_clear((size_t)num_slots * sizeof(uint32_t));
    track->occupy_num_bytes = _sg_roundup_u32((uint32_t)num_slots, 8) >> 3;
    track->occupy_bits = (uint8_t*)_sg_malloc_clear(track->occupy_num_bytes);
}
_SOKOL_PRIVATE void _sg_track_discard(_sg_track_t* track) {
    SOKOL_ASSERT(track);
    if (track->slots) {
        _sg_free(track->slots);
        track->slots = 0;
    }
    if (track->occupy_bits) {
        _sg_free(track->occupy_bits);
        track->occupy_num_bytes = 0;
        track->occupy_bits = 0;
    }
    track->num_slots = 0;
    track->cur_slot = 0;
}
_SOKOL_PRIVATE void _sg_track_reset(_sg_track_t* track) {
    SOKOL_ASSERT(track && track->slots && track->occupy_bits);
    track->cur_slot = 0;
    _sg_clear(track->occupy_bits, track->occupy_num_bytes);
}
_SOKOL_PRIVATE int _sg_track_occupy_index(int slot_index) {
    const int occupy_index = slot_index >> 3;
    return occupy_index;
}
_SOKOL_PRIVATE uint8_t _sg_track_occupy_mask(int slot_index) {
    return (uint8_t)(1 << (slot_index & 7));
}
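// NOTE: worked example for the occupancy bitset helpers above: slot index 13
// maps to occupy_index 13 >> 3 == 1 and occupy_mask 1 << (13 & 7) == 0x20,
// i.e. bit 5 in the second byte of the occupy_bits array.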
_SOKOL_PRIVATE void _sg_track_add(_sg_track_t* track, uint32_t id) {
    SOKOL_ASSERT(track && track->slots && track->occupy_bits);
    SOKOL_ASSERT(id != SG_INVALID_ID);
    const int slot_index = _sg_slot_index(id);
    const int occupy_index = _sg_track_occupy_index(slot_index);
    SOKOL_ASSERT((uint32_t)occupy_index < track->occupy_num_bytes);
    const uint8_t occupy_mask = _sg_track_occupy_mask(slot_index);
    // don't record the same resource twice
    if (0 == (track->occupy_bits[occupy_index] & occupy_mask)) {
        SOKOL_ASSERT(track->cur_slot < track->num_slots);
        track->slots[track->cur_slot++] = id;
        track->occupy_bits[occupy_index] |= occupy_mask;
    }
}
_SOKOL_PRIVATE void _sg_track_remove(_sg_track_t* track, uint32_t id) {
    SOKOL_ASSERT(track && track->slots && track->occupy_bits);
    SOKOL_ASSERT(id != SG_INVALID_ID);
    const int slot_index = _sg_slot_index(id);
    const int occupy_index = _sg_track_occupy_index(slot_index);
    const uint8_t occupy_mask = _sg_track_occupy_mask(slot_index);
    if (track->occupy_bits[occupy_index] & occupy_mask) {
        track->occupy_bits[occupy_index] &= ~occupy_mask;
        // remove tracked id from the slots array
        for (int i = 0; i < track->cur_slot; i++) {
            if (id == track->slots[i]) {
                SOKOL_ASSERT(track->cur_slot > 0);
                track->slots[i] = track->slots[--track->cur_slot];
                break;
            }
        }
    }
}
// ██████  ███████ ███████ ███████
// ██   ██ ██      ██      ██
// ██████  █████   █████   ███████
// ██   ██ ██      ██           ██
// ██   ██ ███████ ██      ███████
//
// >>refs
_SOKOL_PRIVATE _sg_sref_t _sg_sref(const _sg_slot_t* slot) {
    _SG_STRUCT(_sg_sref_t, sref);
    if (slot) {
        sref.id = slot->id;
        sref.uninit_count = slot->uninit_count;
    }
    return sref;
}
_SOKOL_PRIVATE bool _sg_sref_slot_eql(const _sg_sref_t* sref, const _sg_slot_t* slot) {
    SOKOL_ASSERT(sref && slot);
    return (sref->id == slot->id) && (sref->uninit_count == slot->uninit_count);
}
_SOKOL_PRIVATE bool _sg_sref_sref_eql(const _sg_sref_t* sref0, const _sg_sref_t* sref1) {
    SOKOL_ASSERT(sref0 && sref1);
    return (sref0->id == sref1->id) && (sref0->uninit_count == sref1->uninit_count);
}
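// NOTE: an sref pairs a resource id with the slot's uninit counter, so it can
// detect both a recycled slot (id mismatch) and a resource that was
// uninitialized and re-initialized in place (uninit_count mismatch).
// Illustrative usage sketch with a hypothetical buffer pointer 'buf':
//
//   _sg_sref_t sref = _sg_sref(&buf->slot);
//   // ...later, buf may have been destroyed or recreated...
//   if (!_sg_sref_slot_eql(&sref, &buf->slot)) {
//       // the reference is stale and must not be used
//   }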
_SOKOL_PRIVATE _sg_buffer_ref_t _sg_buffer_ref(_sg_buffer_t* buf_or_null) {
    _SG_STRUCT(_sg_buffer_ref_t, ref);
    if (buf_or_null) {
        _sg_buffer_t* buf = buf_or_null;
        SOKOL_ASSERT(buf->slot.id != SG_INVALID_ID);
        ref.ptr = buf;
        ref.sref = _sg_sref(&buf->slot);
    }
    return ref;
}
_SOKOL_PRIVATE _sg_image_ref_t _sg_image_ref(_sg_image_t* img_or_null) {
    _SG_STRUCT(_sg_image_ref_t, ref);
    if (img_or_null) {
        _sg_image_t* img = img_or_null;
        SOKOL_ASSERT(img->slot.id != SG_INVALID_ID);
        ref.ptr = img;
        ref.sref = _sg_sref(&img->slot);
    }
    return ref;
}
_SOKOL_PRIVATE _sg_sampler_ref_t _sg_sampler_ref(_sg_sampler_t* smp_or_null) {
    _SG_STRUCT(_sg_sampler_ref_t, ref);
    if (smp_or_null) {
        _sg_sampler_t* smp = smp_or_null;
        SOKOL_ASSERT(smp->slot.id != SG_INVALID_ID);
        ref.ptr = smp;
        ref.sref = _sg_sref(&smp->slot);
    }
    return ref;
}
_SOKOL_PRIVATE _sg_shader_ref_t _sg_shader_ref(_sg_shader_t* shd_or_null) {
    _SG_STRUCT(_sg_shader_ref_t, ref);
    if (shd_or_null) {
        _sg_shader_t* shd = shd_or_null;
        SOKOL_ASSERT(shd->slot.id != SG_INVALID_ID);
        ref.ptr = shd;
        ref.sref = _sg_sref(&shd->slot);
    }
    return ref;
}
_SOKOL_PRIVATE _sg_pipeline_ref_t _sg_pipeline_ref(_sg_pipeline_t* pip_or_null) {
    _SG_STRUCT(_sg_pipeline_ref_t, ref);
    if (pip_or_null) {
        _sg_pipeline_t* pip = pip_or_null;
        SOKOL_ASSERT(pip->slot.id != SG_INVALID_ID);
        ref.ptr = pip;
        ref.sref = _sg_sref(&pip->slot);
    }
    return ref;
}
_SOKOL_PRIVATE _sg_view_ref_t _sg_view_ref(_sg_view_t* view_or_null) {
    _SG_STRUCT(_sg_view_ref_t, ref);
    if (view_or_null) {
        _sg_view_t* view = view_or_null;
        SOKOL_ASSERT(view->slot.id != SG_INVALID_ID);
        ref.ptr = view;
        ref.sref = _sg_sref(&view->slot);
    }
    return ref;
}
#define _SG_IMPL_RES_EQL(NAME,REF,RES) _SOKOL_PRIVATE bool NAME(const REF* ref, const RES* res) { SOKOL_ASSERT(ref && res); return _sg_sref_slot_eql(&ref->sref, &res->slot); }
_SG_IMPL_RES_EQL(_sg_buffer_ref_eql, _sg_buffer_ref_t, _sg_buffer_t)
_SG_IMPL_RES_EQL(_sg_image_ref_eql, _sg_image_ref_t, _sg_image_t)
_SG_IMPL_RES_EQL(_sg_sampler_ref_eql, _sg_sampler_ref_t, _sg_sampler_t)
_SG_IMPL_RES_EQL(_sg_shader_ref_eql, _sg_shader_ref_t, _sg_shader_t)
_SG_IMPL_RES_EQL(_sg_pipeline_ref_eql, _sg_pipeline_ref_t, _sg_pipeline_t)
_SG_IMPL_RES_EQL(_sg_view_ref_eql, _sg_view_ref_t, _sg_view_t)
#define _SG_IMPL_RES_NULL(NAME,REF) _SOKOL_PRIVATE bool NAME(const REF* ref) { SOKOL_ASSERT(ref); return SG_INVALID_ID == ref->sref.id; }
_SG_IMPL_RES_NULL(_sg_buffer_ref_null, _sg_buffer_ref_t)
_SG_IMPL_RES_NULL(_sg_image_ref_null, _sg_image_ref_t)
_SG_IMPL_RES_NULL(_sg_sampler_ref_null, _sg_sampler_ref_t)
_SG_IMPL_RES_NULL(_sg_shader_ref_null, _sg_shader_ref_t)
_SG_IMPL_RES_NULL(_sg_pipeline_ref_null, _sg_pipeline_ref_t)
_SG_IMPL_RES_NULL(_sg_view_ref_null, _sg_view_ref_t)
#define _SG_IMPL_RES_ALIVE(NAME,REF) _SOKOL_PRIVATE bool NAME(const REF* ref) { SOKOL_ASSERT(ref); return ref->ptr && _sg_sref_slot_eql(&ref->sref, &ref->ptr->slot); }
_SG_IMPL_RES_ALIVE(_sg_buffer_ref_alive, _sg_buffer_ref_t)
_SG_IMPL_RES_ALIVE(_sg_image_ref_alive, _sg_image_ref_t)
_SG_IMPL_RES_ALIVE(_sg_sampler_ref_alive, _sg_sampler_ref_t)
_SG_IMPL_RES_ALIVE(_sg_shader_ref_alive, _sg_shader_ref_t)
_SG_IMPL_RES_ALIVE(_sg_pipeline_ref_alive, _sg_pipeline_ref_t)
_SG_IMPL_RES_ALIVE(_sg_view_ref_alive, _sg_view_ref_t)
#define _SG_IMPL_RES_VALID(NAME,REF) _SOKOL_PRIVATE bool NAME(const REF* ref) { SOKOL_ASSERT(ref); return ref->ptr && _sg_sref_slot_eql(&ref->sref, &ref->ptr->slot) && (ref->ptr->slot.state == SG_RESOURCESTATE_VALID); }
_SG_IMPL_RES_VALID(_sg_buffer_ref_valid, _sg_buffer_ref_t)
_SG_IMPL_RES_VALID(_sg_image_ref_valid, _sg_image_ref_t)
_SG_IMPL_RES_VALID(_sg_sampler_ref_valid, _sg_sampler_ref_t)
_SG_IMPL_RES_VALID(_sg_shader_ref_valid, _sg_shader_ref_t)
_SG_IMPL_RES_VALID(_sg_pipeline_ref_valid, _sg_pipeline_ref_t)
_SG_IMPL_RES_VALID(_sg_view_ref_valid, _sg_view_ref_t)
#define _SG_IMPL_RES_PTR(NAME,REF,RES) _SOKOL_PRIVATE RES* NAME(const REF* ref) { SOKOL_ASSERT(ref && ref->ptr && _sg_sref_slot_eql(&ref->sref, &ref->ptr->slot)); return ref->ptr; }
_SG_IMPL_RES_PTR(_sg_buffer_ref_ptr, _sg_buffer_ref_t, _sg_buffer_t)
_SG_IMPL_RES_PTR(_sg_image_ref_ptr, _sg_image_ref_t, _sg_image_t)
_SG_IMPL_RES_PTR(_sg_sampler_ref_ptr, _sg_sampler_ref_t, _sg_sampler_t)
_SG_IMPL_RES_PTR(_sg_shader_ref_ptr, _sg_shader_ref_t, _sg_shader_t)
_SG_IMPL_RES_PTR(_sg_pipeline_ref_ptr, _sg_pipeline_ref_t, _sg_pipeline_t)
_SG_IMPL_RES_PTR(_sg_view_ref_ptr, _sg_view_ref_t, _sg_view_t)
#define _SG_IMPL_RES_PTR_OR_NULL(NAME,REF,RES) _SOKOL_PRIVATE RES* NAME(const REF* ref) { SOKOL_ASSERT(ref); if ((SG_INVALID_ID != ref->sref.id) && _sg_sref_slot_eql(&ref->sref, &ref->ptr->slot)) { return ref->ptr; } else { return 0; } }
_SG_IMPL_RES_PTR_OR_NULL(_sg_buffer_ref_ptr_or_null, _sg_buffer_ref_t, _sg_buffer_t)
_SG_IMPL_RES_PTR_OR_NULL(_sg_image_ref_ptr_or_null, _sg_image_ref_t, _sg_image_t)
_SG_IMPL_RES_PTR_OR_NULL(_sg_sampler_ref_ptr_or_null, _sg_sampler_ref_t, _sg_sampler_t)
_SG_IMPL_RES_PTR_OR_NULL(_sg_shader_ref_ptr_or_null, _sg_shader_ref_t, _sg_shader_t)
_SG_IMPL_RES_PTR_OR_NULL(_sg_pipeline_ref_ptr_or_null, _sg_pipeline_ref_t, _sg_pipeline_t)
_SG_IMPL_RES_PTR_OR_NULL(_sg_view_ref_ptr_or_null, _sg_view_ref_t, _sg_view_t)
// ██   ██ ███████ ██      ██████  ███████ ██████  ███████
// ██   ██ ██      ██      ██   ██ ██      ██   ██ ██
// ███████ █████   ██      ██████  █████   ██████  ███████
// ██   ██ ██      ██      ██      ██      ██   ██      ██
// ██   ██ ███████ ███████ ██      ███████ ██   ██ ███████
//
// >>helpers
// helper macros
#define _sg_def(val, def) (((val) == 0) ? (def) : (val))
#define _sg_def_flt(val, def) (((val) == 0.0f) ? (def) : (val))
#define _sg_min(a,b) (((a)<(b))?(a):(b))
#define _sg_max(a,b) (((a)>(b))?(a):(b))
#define _sg_clamp(v,v0,v1) (((v)<(v0))?(v0):(((v)>(v1))?(v1):(v)))
#define _sg_fequal(val,cmp,delta) ((((val)-(cmp))> -(delta))&&(((val)-(cmp))<(delta)))
#define _sg_ispow2(val) ((val&(val-1))==0)
#define _sg_stats_add(key,val) {if(_sg.stats_enabled){ _sg.stats.cur_frame.key+=val;}}
#define _sg_stats_inc(key) {if(_sg.stats_enabled){ _sg.stats.cur_frame.key++;}}
#define _sg_resource_stats_inc(key) {if(_sg.stats_enabled){ _sg.stats.cur_frame.key++; _sg.stats.total.key++;}}
_SOKOL_PRIVATE void _sg_update_alive_free_resource_stats(sg_total_resource_stats* stats, const _sg_pool_t* pool) {
    SOKOL_ASSERT(stats && pool);
    stats->alive = (uint32_t) ((pool->size - 1) - pool->queue_top);
    stats->free = (uint32_t) pool->queue_top;
}
_SOKOL_PRIVATE void _sg_update_stats(void) {
    _sg.stats.cur_frame.frame_index = _sg.frame_index;
    _sg.stats.prev_frame = _sg.stats.cur_frame;
    _sg_clear(&_sg.stats.cur_frame, sizeof(_sg.stats.cur_frame));
}
_SOKOL_PRIVATE uint32_t _sg_align_u32(uint32_t val, uint32_t align) {
    SOKOL_ASSERT((align > 0) && ((align & (align - 1)) == 0));
    return (val + (align - 1)) & ~(align - 1);
}
_SOKOL_PRIVATE _sg_recti_t _sg_clipi(int x, int y, int w, int h, int clip_width, int clip_height) {
    x = _sg_min(_sg_max(0, x), clip_width-1);
    y = _sg_min(_sg_max(0, y), clip_height-1);
    if ((x + w) > clip_width) {
        w = clip_width - x;
    }
    if ((y + h) > clip_height) {
        h = clip_height - y;
    }
    w = _sg_max(w, 1);
    h = _sg_max(h, 1);
    const _sg_recti_t res = { x, y, w, h };
    return res;
}
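// NOTE: worked example for _sg_clipi() above: clipping the rect
// (x=-10, y=5, w=50, h=100) against a 64x64 area first clamps x to 0, then
// shrinks h to 64 - 5 = 59, yielding (0, 5, 50, 59); width and height are
// never clamped below 1.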
// return size of a mipmap level
_SOKOL_PRIVATE int _sg_miplevel_dim(int base_dim, int mip_level) {
    return _sg_max(base_dim >> mip_level, 1);
}
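// NOTE: e.g. for a 256-pixel base dimension, _sg_miplevel_dim(256, 3) returns
// 256 >> 3 == 32, and for very high mip levels the result is clamped to 1.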
_SOKOL_PRIVATE bool _sg_image_view_alive(const _sg_view_t* view) {
    return view && _sg_image_ref_alive(&view->cmn.img.ref);
}
_SOKOL_PRIVATE _sg_dimi_t _sg_image_view_dim(const _sg_view_t* view) {
    SOKOL_ASSERT(view);
    const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
    SOKOL_ASSERT((img->cmn.width > 0) && (img->cmn.height > 0));
    _SG_STRUCT(_sg_dimi_t, res);
    res.width = _sg_miplevel_dim(img->cmn.width, view->cmn.img.mip_level);
    res.height = _sg_miplevel_dim(img->cmn.height, view->cmn.img.mip_level);
    return res;
}
_SOKOL_PRIVATE bool _sg_attachments_empty(const sg_attachments* atts) {
    SOKOL_ASSERT(atts);
    for (size_t i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        if (atts->colors[i].id != SG_INVALID_ID) {
            return false;
        }
        if (atts->resolves[i].id != SG_INVALID_ID) {
            return false;
        }
    }
    if (atts->depth_stencil.id != SG_INVALID_ID) {
        return false;
    }
    return true;
}
_SOKOL_PRIVATE _sg_attachments_ptrs_t _sg_attachments_ptrs(const sg_attachments* atts) {
    SOKOL_ASSERT(atts);
    _SG_STRUCT(_sg_attachments_ptrs_t, res);
    res.empty = true;
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        if (atts->colors[i].id != SG_INVALID_ID) {
            res.empty = false;
            res.num_color_views += 1;
            res.color_views[i] = _sg_lookup_view(atts->colors[i].id);
        }
        if (atts->resolves[i].id != SG_INVALID_ID) {
            SOKOL_ASSERT(atts->colors[i].id != SG_INVALID_ID);
            res.empty = false;
            res.resolve_views[i] = _sg_lookup_view(atts->resolves[i].id);
        }
    }
    if (atts->depth_stencil.id != SG_INVALID_ID) {
        res.empty = false;
        res.ds_view = _sg_lookup_view(atts->depth_stencil.id);
    }
    return res;
}
_SOKOL_PRIVATE _sg_dimi_t _sg_attachments_dim(const _sg_attachments_ptrs_t* atts_ptrs) {
    if (atts_ptrs->ds_view) {
        return _sg_image_view_dim(atts_ptrs->ds_view);
    } else {
        SOKOL_ASSERT(atts_ptrs->color_views[0]);
        return _sg_image_view_dim(atts_ptrs->color_views[0]);
    }
}
_SOKOL_PRIVATE bool _sg_attachments_alive(const _sg_attachments_ptrs_t* atts_ptrs) {
    for (int i = 0; i < atts_ptrs->num_color_views; i++) {
        if (!_sg_image_view_alive(atts_ptrs->color_views[i])) {
            return false;
        }
        if (atts_ptrs->resolve_views[i] && !_sg_image_view_alive(atts_ptrs->resolve_views[i])) {
            return false;
        }
    }
    if (atts_ptrs->ds_view && !_sg_image_view_alive(atts_ptrs->ds_view)) {
        return false;
    }
    return true;
}
_SOKOL_PRIVATE void _sg_buffer_common_init(_sg_buffer_common_t* cmn, const sg_buffer_desc* desc) {
    cmn->size = (int)desc->size;
    cmn->append_pos = 0;
    cmn->append_overflow = false;
    cmn->update_frame_index = 0;
    cmn->append_frame_index = 0;
    cmn->num_slots = desc->usage.immutable ? 1 : SG_NUM_INFLIGHT_FRAMES;
    cmn->active_slot = 0;
    cmn->usage = desc->usage;
}
_SOKOL_PRIVATE void _sg_image_common_init(_sg_image_common_t* cmn, const sg_image_desc* desc) {
    cmn->upd_frame_index = 0;
    cmn->num_slots = desc->usage.immutable ? 1 : SG_NUM_INFLIGHT_FRAMES;
    cmn->active_slot = 0;
    cmn->type = desc->type;
    cmn->width = desc->width;
    cmn->height = desc->height;
    cmn->num_slices = desc->num_slices;
    cmn->num_mipmaps = desc->num_mipmaps;
    cmn->usage = desc->usage;
    cmn->pixel_format = desc->pixel_format;
    cmn->sample_count = desc->sample_count;
}
_SOKOL_PRIVATE void _sg_sampler_common_init(_sg_sampler_common_t* cmn, const sg_sampler_desc* desc) {
    cmn->min_filter = desc->min_filter;
    cmn->mag_filter = desc->mag_filter;
    cmn->mipmap_filter = desc->mipmap_filter;
    cmn->wrap_u = desc->wrap_u;
    cmn->wrap_v = desc->wrap_v;
    cmn->wrap_w = desc->wrap_w;
    cmn->min_lod = desc->min_lod;
    cmn->max_lod = desc->max_lod;
    cmn->border_color = desc->border_color;
    cmn->compare = desc->compare;
    cmn->max_anisotropy = desc->max_anisotropy;
}
_SOKOL_PRIVATE void _sg_shader_common_init(_sg_shader_common_t* cmn, const sg_shader_desc* desc) {
    cmn->is_compute = desc->compute_func.source || desc->compute_func.bytecode.ptr;
    for (size_t i = 0; i < SG_MAX_VERTEX_ATTRIBUTES; i++) {
        cmn->attrs[i].base_type = desc->attrs[i].base_type;
    }
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        const sg_shader_uniform_block* src = &desc->uniform_blocks[i];
        _sg_shader_uniform_block_t* dst = &cmn->uniform_blocks[i];
        if (src->stage != SG_SHADERSTAGE_NONE) {
            cmn->required_bindings_and_uniforms |= (1 << i);
            dst->stage = src->stage;
            dst->size = src->size;
        }
    }
    const uint32_t required_bindings_flag = (1 << SG_MAX_UNIFORMBLOCK_BINDSLOTS);
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        _sg_shader_view_t* dst = &cmn->views[i];
        if (desc->views[i].texture.stage != SG_SHADERSTAGE_NONE) {
            const sg_shader_texture_view* src = &desc->views[i].texture;
            dst->stage = src->stage;
            dst->view_type = SG_VIEWTYPE_TEXTURE;
            dst->image_type = src->image_type;
            dst->sample_type = src->sample_type;
            dst->multisampled = src->multisampled;
        } else if (desc->views[i].storage_buffer.stage != SG_SHADERSTAGE_NONE) {
            const sg_shader_storage_buffer_view* src = &desc->views[i].storage_buffer;
            cmn->required_bindings_and_uniforms |= required_bindings_flag;
            dst->stage = src->stage;
            dst->view_type = SG_VIEWTYPE_STORAGEBUFFER;
            dst->sbuf_readonly = src->readonly;
        } else if (desc->views[i].storage_image.stage != SG_SHADERSTAGE_NONE) {
            const sg_shader_storage_image_view* src = &desc->views[i].storage_image;
            cmn->required_bindings_and_uniforms |= required_bindings_flag;
            dst->stage = src->stage;
            dst->view_type = SG_VIEWTYPE_STORAGEIMAGE;
            dst->image_type = src->image_type;
            dst->access_format = src->access_format;
            dst->simg_writeonly = src->writeonly;
        }
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        const sg_shader_sampler* src = &desc->samplers[i];
        _sg_shader_sampler_t* dst = &cmn->samplers[i];
        if (src->stage != SG_SHADERSTAGE_NONE) {
            cmn->required_bindings_and_uniforms |= required_bindings_flag;
            dst->stage = src->stage;
            dst->sampler_type = src->sampler_type;
        }
    }
    for (size_t i = 0; i < SG_MAX_TEXTURE_SAMPLER_PAIRS; i++) {
        const sg_shader_texture_sampler_pair* src = &desc->texture_sampler_pairs[i];
        _sg_shader_texture_sampler_t* dst = &cmn->texture_samplers[i];
        if (src->stage != SG_SHADERSTAGE_NONE) {
            dst->stage = src->stage;
            SOKOL_ASSERT((src->view_slot >= 0) && (src->view_slot < SG_MAX_VIEW_BINDSLOTS));
            SOKOL_ASSERT(cmn->views[src->view_slot].view_type == SG_VIEWTYPE_TEXTURE);
            SOKOL_ASSERT(cmn->views[src->view_slot].stage == src->stage);
            dst->view_slot = src->view_slot;
            SOKOL_ASSERT((src->sampler_slot >= 0) && (src->sampler_slot < SG_MAX_SAMPLER_BINDSLOTS));
            SOKOL_ASSERT(desc->samplers[src->sampler_slot].stage == src->stage);
            dst->sampler_slot = src->sampler_slot;
        }
    }
}
_SOKOL_PRIVATE void _sg_pipeline_common_init(_sg_pipeline_common_t* cmn, const sg_pipeline_desc* desc, _sg_shader_t* shd) {
    SOKOL_ASSERT((desc->color_count >= 0) && (desc->color_count <= SG_MAX_COLOR_ATTACHMENTS));
    // FIXME: most of this isn't needed for compute pipelines
    const uint32_t required_bindings_flag = (1 << SG_MAX_UNIFORMBLOCK_BINDSLOTS);
    for (size_t attr_idx = 0; attr_idx < SG_MAX_VERTEX_ATTRIBUTES; attr_idx++) {
        const sg_vertex_attr_state* attr_state = &desc->layout.attrs[attr_idx];
        if (attr_state->format != SG_VERTEXFORMAT_INVALID) {
            SOKOL_ASSERT((attr_state->buffer_index >= 0) && (attr_state->buffer_index < SG_MAX_VERTEXBUFFER_BINDSLOTS));
            cmn->vertex_buffer_layout_active[attr_state->buffer_index] = true;
            cmn->required_bindings_and_uniforms |= required_bindings_flag;
        }
    }
    cmn->use_instanced_draw = false;
    for (size_t vbuf_idx = 0; vbuf_idx < SG_MAX_VERTEXBUFFER_BINDSLOTS; vbuf_idx++) {
        const sg_vertex_buffer_layout_state* vbuf_state = &desc->layout.buffers[vbuf_idx];
        if (vbuf_state->step_func == SG_VERTEXSTEP_PER_INSTANCE) {
            cmn->use_instanced_draw = true;
        }
    }
    cmn->is_compute = desc->compute;
    cmn->shader = _sg_shader_ref(shd);
    cmn->layout = desc->layout;
    cmn->depth = desc->depth;
    cmn->stencil = desc->stencil;
    cmn->color_count = desc->color_count;
    for (int i = 0; i < desc->color_count; i++) {
        cmn->colors[i] = desc->colors[i];
    }
    cmn->primitive_type = desc->primitive_type;
    cmn->index_type = desc->index_type;
    if (cmn->index_type != SG_INDEXTYPE_NONE) {
        cmn->required_bindings_and_uniforms |= required_bindings_flag;
    }
    cmn->cull_mode = desc->cull_mode;
    cmn->face_winding = desc->face_winding;
    cmn->sample_count = desc->sample_count;
    cmn->blend_color = desc->blend_color;
    cmn->alpha_to_coverage_enabled = desc->alpha_to_coverage_enabled;
}
_SOKOL_PRIVATE void _sg_buffer_view_common_init(_sg_buffer_view_common_t* cmn, const sg_buffer_view_desc* desc, _sg_buffer_t* buf) {
    SOKOL_ASSERT(SG_RESOURCESTATE_VALID == buf->slot.state);
    cmn->ref = _sg_buffer_ref(buf);
    cmn->offset = desc->offset;
}
_SOKOL_PRIVATE void _sg_texture_view_common_init(_sg_image_view_common_t* cmn, const sg_texture_view_desc* desc, _sg_image_t* img) {
    SOKOL_ASSERT(SG_RESOURCESTATE_VALID == img->slot.state);
    cmn->ref = _sg_image_ref(img);
    cmn->mip_level = desc->mip_levels.base;
    cmn->mip_level_count = _sg_def(desc->mip_levels.count, img->cmn.num_mipmaps - cmn->mip_level);
    cmn->slice = desc->slices.base;
    switch (img->cmn.type) {
        case SG_IMAGETYPE_2D:
            cmn->slice_count = 1;
            break;
        case SG_IMAGETYPE_CUBE:
            cmn->slice_count = 6;
            break;
        case SG_IMAGETYPE_3D:
            cmn->slice_count = 1;
            break;
        case SG_IMAGETYPE_ARRAY:
            cmn->slice_count = _sg_def(desc->slices.count, img->cmn.num_slices - cmn->slice);
            break;
        default:
            SOKOL_UNREACHABLE;
    }
}
_SOKOL_PRIVATE void _sg_image_view_common_init(_sg_image_view_common_t* cmn, const sg_image_view_desc* desc, _sg_image_t* img) {
    SOKOL_ASSERT(SG_RESOURCESTATE_VALID == img->slot.state);
    cmn->ref = _sg_image_ref(img);
    cmn->mip_level = desc->mip_level;
    cmn->mip_level_count = 1;
    cmn->slice = desc->slice;
    cmn->slice_count = 1;
}
_SOKOL_PRIVATE void _sg_view_common_init(_sg_view_common_t* cmn, const sg_view_desc* desc, _sg_buffer_t* buf, _sg_image_t* img) {
    if (desc->texture.image.id != SG_INVALID_ID) {
        SOKOL_ASSERT(img);
        cmn->type = SG_VIEWTYPE_TEXTURE;
        _sg_texture_view_common_init(&cmn->img, &desc->texture, img);
    } else if (desc->storage_buffer.buffer.id != SG_INVALID_ID) {
        SOKOL_ASSERT(buf);
        cmn->type = SG_VIEWTYPE_STORAGEBUFFER;
        _sg_buffer_view_common_init(&cmn->buf, &desc->storage_buffer, buf);
    } else if (desc->storage_image.image.id != SG_INVALID_ID) {
        SOKOL_ASSERT(img);
        cmn->type = SG_VIEWTYPE_STORAGEIMAGE;
        _sg_image_view_common_init(&cmn->img, &desc->storage_image, img);
    } else if (desc->color_attachment.image.id != SG_INVALID_ID) {
        SOKOL_ASSERT(img);
        cmn->type = SG_VIEWTYPE_COLORATTACHMENT;
        _sg_image_view_common_init(&cmn->img, &desc->color_attachment, img);
    } else if (desc->resolve_attachment.image.id != SG_INVALID_ID) {
        SOKOL_ASSERT(img);
        cmn->type = SG_VIEWTYPE_RESOLVEATTACHMENT;
        _sg_image_view_common_init(&cmn->img, &desc->resolve_attachment, img);
    } else if (desc->depth_stencil_attachment.image.id != SG_INVALID_ID) {
        SOKOL_ASSERT(img);
        cmn->type = SG_VIEWTYPE_DEPTHSTENCILATTACHMENT;
        _sg_image_view_common_init(&cmn->img, &desc->depth_stencil_attachment, img);
    } else {
        SOKOL_UNREACHABLE;
    }
}
_SOKOL_PRIVATE int _sg_vertexformat_bytesize(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT: return 4;
        case SG_VERTEXFORMAT_FLOAT2: return 8;
        case SG_VERTEXFORMAT_FLOAT3: return 12;
        case SG_VERTEXFORMAT_FLOAT4: return 16;
        case SG_VERTEXFORMAT_INT: return 4;
        case SG_VERTEXFORMAT_INT2: return 8;
        case SG_VERTEXFORMAT_INT3: return 12;
        case SG_VERTEXFORMAT_INT4: return 16;
        case SG_VERTEXFORMAT_UINT: return 4;
        case SG_VERTEXFORMAT_UINT2: return 8;
        case SG_VERTEXFORMAT_UINT3: return 12;
        case SG_VERTEXFORMAT_UINT4: return 16;
        case SG_VERTEXFORMAT_BYTE4: return 4;
        case SG_VERTEXFORMAT_BYTE4N: return 4;
        case SG_VERTEXFORMAT_UBYTE4: return 4;
        case SG_VERTEXFORMAT_UBYTE4N: return 4;
        case SG_VERTEXFORMAT_SHORT2: return 4;
        case SG_VERTEXFORMAT_SHORT2N: return 4;
        case SG_VERTEXFORMAT_USHORT2: return 4;
        case SG_VERTEXFORMAT_USHORT2N: return 4;
        case SG_VERTEXFORMAT_SHORT4: return 8;
        case SG_VERTEXFORMAT_SHORT4N: return 8;
        case SG_VERTEXFORMAT_USHORT4: return 8;
        case SG_VERTEXFORMAT_USHORT4N: return 8;
        case SG_VERTEXFORMAT_UINT10_N2: return 4;
        case SG_VERTEXFORMAT_HALF2: return 4;
        case SG_VERTEXFORMAT_HALF4: return 8;
        case SG_VERTEXFORMAT_INVALID: return 0;
        default:
            SOKOL_UNREACHABLE;
            return -1;
    }
}
_SOKOL_PRIVATE const char* _sg_vertexformat_to_string(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT: return "FLOAT";
        case SG_VERTEXFORMAT_FLOAT2: return "FLOAT2";
        case SG_VERTEXFORMAT_FLOAT3: return "FLOAT3";
        case SG_VERTEXFORMAT_FLOAT4: return "FLOAT4";
        case SG_VERTEXFORMAT_INT: return "INT";
        case SG_VERTEXFORMAT_INT2: return "INT2";
        case SG_VERTEXFORMAT_INT3: return "INT3";
        case SG_VERTEXFORMAT_INT4: return "INT4";
        case SG_VERTEXFORMAT_UINT: return "UINT";
        case SG_VERTEXFORMAT_UINT2: return "UINT2";
        case SG_VERTEXFORMAT_UINT3: return "UINT3";
        case SG_VERTEXFORMAT_UINT4: return "UINT4";
        case SG_VERTEXFORMAT_BYTE4: return "BYTE4";
        case SG_VERTEXFORMAT_BYTE4N: return "BYTE4N";
        case SG_VERTEXFORMAT_UBYTE4: return "UBYTE4";
        case SG_VERTEXFORMAT_UBYTE4N: return "UBYTE4N";
        case SG_VERTEXFORMAT_SHORT2: return "SHORT2";
        case SG_VERTEXFORMAT_SHORT2N: return "SHORT2N";
        case SG_VERTEXFORMAT_USHORT2: return "USHORT2";
        case SG_VERTEXFORMAT_USHORT2N: return "USHORT2N";
        case SG_VERTEXFORMAT_SHORT4: return "SHORT4";
        case SG_VERTEXFORMAT_SHORT4N: return "SHORT4N";
        case SG_VERTEXFORMAT_USHORT4: return "USHORT4";
        case SG_VERTEXFORMAT_USHORT4N: return "USHORT4N";
        case SG_VERTEXFORMAT_UINT10_N2: return "UINT10_N2";
        case SG_VERTEXFORMAT_HALF2: return "HALF2";
        case SG_VERTEXFORMAT_HALF4: return "HALF4";
        default:
            SOKOL_UNREACHABLE;
            return "INVALID";
    }
}
_SOKOL_PRIVATE const char* _sg_shaderattrbasetype_to_string(sg_shader_attr_base_type b) {
    switch (b) {
        case SG_SHADERATTRBASETYPE_UNDEFINED: return "UNDEFINED";
        case SG_SHADERATTRBASETYPE_FLOAT: return "FLOAT";
        case SG_SHADERATTRBASETYPE_SINT: return "SINT";
        case SG_SHADERATTRBASETYPE_UINT: return "UINT";
        default:
            SOKOL_UNREACHABLE;
            return "INVALID";
    }
}
_SOKOL_PRIVATE sg_shader_attr_base_type _sg_vertexformat_basetype(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT:
        case SG_VERTEXFORMAT_FLOAT2:
        case SG_VERTEXFORMAT_FLOAT3:
        case SG_VERTEXFORMAT_FLOAT4:
        case SG_VERTEXFORMAT_HALF2:
        case SG_VERTEXFORMAT_HALF4:
        case SG_VERTEXFORMAT_BYTE4N:
        case SG_VERTEXFORMAT_UBYTE4N:
        case SG_VERTEXFORMAT_SHORT2N:
        case SG_VERTEXFORMAT_USHORT2N:
        case SG_VERTEXFORMAT_SHORT4N:
        case SG_VERTEXFORMAT_USHORT4N:
        case SG_VERTEXFORMAT_UINT10_N2:
            return SG_SHADERATTRBASETYPE_FLOAT;
        case SG_VERTEXFORMAT_INT:
        case SG_VERTEXFORMAT_INT2:
        case SG_VERTEXFORMAT_INT3:
        case SG_VERTEXFORMAT_INT4:
        case SG_VERTEXFORMAT_BYTE4:
        case SG_VERTEXFORMAT_SHORT2:
        case SG_VERTEXFORMAT_SHORT4:
            return SG_SHADERATTRBASETYPE_SINT;
        case SG_VERTEXFORMAT_UINT:
        case SG_VERTEXFORMAT_UINT2:
        case SG_VERTEXFORMAT_UINT3:
        case SG_VERTEXFORMAT_UINT4:
        case SG_VERTEXFORMAT_UBYTE4:
        case SG_VERTEXFORMAT_USHORT2:
        case SG_VERTEXFORMAT_USHORT4:
            return SG_SHADERATTRBASETYPE_UINT;
        default:
            SOKOL_UNREACHABLE;
            return SG_SHADERATTRBASETYPE_UNDEFINED;
    }
}
  7312. _SOKOL_PRIVATE uint32_t _sg_uniform_alignment(sg_uniform_type type, int array_count, sg_uniform_layout ub_layout) {
  7313. if (ub_layout == SG_UNIFORMLAYOUT_NATIVE) {
  7314. return 1;
  7315. } else {
  7316. SOKOL_ASSERT(array_count > 0);
  7317. if (array_count == 1) {
  7318. switch (type) {
  7319. case SG_UNIFORMTYPE_FLOAT:
  7320. case SG_UNIFORMTYPE_INT:
  7321. return 4;
  7322. case SG_UNIFORMTYPE_FLOAT2:
  7323. case SG_UNIFORMTYPE_INT2:
  7324. return 8;
  7325. case SG_UNIFORMTYPE_FLOAT3:
  7326. case SG_UNIFORMTYPE_FLOAT4:
  7327. case SG_UNIFORMTYPE_INT3:
  7328. case SG_UNIFORMTYPE_INT4:
  7329. return 16;
  7330. case SG_UNIFORMTYPE_MAT4:
  7331. return 16;
  7332. default:
  7333. SOKOL_UNREACHABLE;
  7334. return 1;
  7335. }
  7336. } else {
  7337. return 16;
  7338. }
  7339. }
  7340. }
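// Worked example (illustrative note, not part of the original source): with
// the std140-style rules implemented above, a uniform block containing a
// 'float' followed by a 'vec3' places the float at offset 0 (alignment 4)
// and the vec3 at offset 16 (FLOAT3 aligns to 16 bytes), while
// SG_UNIFORMLAYOUT_NATIVE returns alignment 1 and packs members back-to-back.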
_SOKOL_PRIVATE uint32_t _sg_uniform_size(sg_uniform_type type, int array_count, sg_uniform_layout ub_layout) {
    SOKOL_ASSERT(array_count > 0);
    if (array_count == 1) {
        switch (type) {
            case SG_UNIFORMTYPE_FLOAT:
            case SG_UNIFORMTYPE_INT:
                return 4;
            case SG_UNIFORMTYPE_FLOAT2:
            case SG_UNIFORMTYPE_INT2:
                return 8;
            case SG_UNIFORMTYPE_FLOAT3:
            case SG_UNIFORMTYPE_INT3:
                return 12;
            case SG_UNIFORMTYPE_FLOAT4:
            case SG_UNIFORMTYPE_INT4:
                return 16;
            case SG_UNIFORMTYPE_MAT4:
                return 64;
            default:
                SOKOL_UNREACHABLE;
                return 0;
        }
    } else {
        if (ub_layout == SG_UNIFORMLAYOUT_NATIVE) {
            switch (type) {
                case SG_UNIFORMTYPE_FLOAT:
                case SG_UNIFORMTYPE_INT:
                    return 4 * (uint32_t)array_count;
                case SG_UNIFORMTYPE_FLOAT2:
                case SG_UNIFORMTYPE_INT2:
                    return 8 * (uint32_t)array_count;
                case SG_UNIFORMTYPE_FLOAT3:
                case SG_UNIFORMTYPE_INT3:
                    return 12 * (uint32_t)array_count;
                case SG_UNIFORMTYPE_FLOAT4:
                case SG_UNIFORMTYPE_INT4:
                    return 16 * (uint32_t)array_count;
                case SG_UNIFORMTYPE_MAT4:
                    return 64 * (uint32_t)array_count;
                default:
                    SOKOL_UNREACHABLE;
                    return 0;
            }
        } else {
            switch (type) {
                case SG_UNIFORMTYPE_FLOAT:
                case SG_UNIFORMTYPE_FLOAT2:
                case SG_UNIFORMTYPE_FLOAT3:
                case SG_UNIFORMTYPE_FLOAT4:
                case SG_UNIFORMTYPE_INT:
                case SG_UNIFORMTYPE_INT2:
                case SG_UNIFORMTYPE_INT3:
                case SG_UNIFORMTYPE_INT4:
                    return 16 * (uint32_t)array_count;
                case SG_UNIFORMTYPE_MAT4:
                    return 64 * (uint32_t)array_count;
                default:
                    SOKOL_UNREACHABLE;
                    return 0;
            }
        }
    }
}
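// Worked example (illustrative, not part of the original source): a uniform
// declared as 'float weights[4]' (array_count = 4) occupies 4 * 4 = 16 bytes
// under SG_UNIFORMLAYOUT_NATIVE, but 4 * 16 = 64 bytes under std140-style
// layout, because each array element is padded out to a 16-byte stride.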
_SOKOL_PRIVATE bool _sg_is_compressed_pixel_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_BC1_RGBA:
        case SG_PIXELFORMAT_BC2_RGBA:
        case SG_PIXELFORMAT_BC3_RGBA:
        case SG_PIXELFORMAT_BC3_SRGBA:
        case SG_PIXELFORMAT_BC4_R:
        case SG_PIXELFORMAT_BC4_RSN:
        case SG_PIXELFORMAT_BC5_RG:
        case SG_PIXELFORMAT_BC5_RGSN:
        case SG_PIXELFORMAT_BC6H_RGBF:
        case SG_PIXELFORMAT_BC6H_RGBUF:
        case SG_PIXELFORMAT_BC7_RGBA:
        case SG_PIXELFORMAT_BC7_SRGBA:
        case SG_PIXELFORMAT_ETC2_RGB8:
        case SG_PIXELFORMAT_ETC2_SRGB8:
        case SG_PIXELFORMAT_ETC2_RGB8A1:
        case SG_PIXELFORMAT_ETC2_RGBA8:
        case SG_PIXELFORMAT_ETC2_SRGB8A8:
        case SG_PIXELFORMAT_EAC_R11:
        case SG_PIXELFORMAT_EAC_R11SN:
        case SG_PIXELFORMAT_EAC_RG11:
        case SG_PIXELFORMAT_EAC_RG11SN:
        case SG_PIXELFORMAT_ASTC_4x4_RGBA:
        case SG_PIXELFORMAT_ASTC_4x4_SRGBA:
            return true;
        default:
            return false;
    }
}
_SOKOL_PRIVATE bool _sg_is_valid_attachment_color_format(sg_pixel_format fmt) {
    const int fmt_index = (int) fmt;
    SOKOL_ASSERT((fmt_index >= 0) && (fmt_index < _SG_PIXELFORMAT_NUM));
    return _sg.formats[fmt_index].render && !_sg.formats[fmt_index].depth;
}
_SOKOL_PRIVATE bool _sg_is_valid_attachment_depth_format(sg_pixel_format fmt) {
    const int fmt_index = (int) fmt;
    SOKOL_ASSERT((fmt_index >= 0) && (fmt_index < _SG_PIXELFORMAT_NUM));
    return _sg.formats[fmt_index].render && _sg.formats[fmt_index].depth;
}
_SOKOL_PRIVATE bool _sg_is_valid_storage_image_format(sg_pixel_format fmt) {
    const int fmt_index = (int) fmt;
    SOKOL_ASSERT((fmt_index >= 0) && (fmt_index < _SG_PIXELFORMAT_NUM));
    return _sg.formats[fmt_index].read || _sg.formats[fmt_index].write;
}
_SOKOL_PRIVATE bool _sg_is_depth_or_depth_stencil_format(sg_pixel_format fmt) {
    return (SG_PIXELFORMAT_DEPTH == fmt) || (SG_PIXELFORMAT_DEPTH_STENCIL == fmt);
}
_SOKOL_PRIVATE bool _sg_is_depth_stencil_format(sg_pixel_format fmt) {
    return (SG_PIXELFORMAT_DEPTH_STENCIL == fmt);
}
_SOKOL_PRIVATE int _sg_pixelformat_bytesize(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_R8:
        case SG_PIXELFORMAT_R8SN:
        case SG_PIXELFORMAT_R8UI:
        case SG_PIXELFORMAT_R8SI:
            return 1;
        case SG_PIXELFORMAT_R16:
        case SG_PIXELFORMAT_R16SN:
        case SG_PIXELFORMAT_R16UI:
        case SG_PIXELFORMAT_R16SI:
        case SG_PIXELFORMAT_R16F:
        case SG_PIXELFORMAT_RG8:
        case SG_PIXELFORMAT_RG8SN:
        case SG_PIXELFORMAT_RG8UI:
        case SG_PIXELFORMAT_RG8SI:
            return 2;
        case SG_PIXELFORMAT_R32UI:
        case SG_PIXELFORMAT_R32SI:
        case SG_PIXELFORMAT_R32F:
        case SG_PIXELFORMAT_RG16:
        case SG_PIXELFORMAT_RG16SN:
        case SG_PIXELFORMAT_RG16UI:
        case SG_PIXELFORMAT_RG16SI:
        case SG_PIXELFORMAT_RG16F:
        case SG_PIXELFORMAT_RGBA8:
        case SG_PIXELFORMAT_SRGB8A8:
        case SG_PIXELFORMAT_RGBA8SN:
        case SG_PIXELFORMAT_RGBA8UI:
        case SG_PIXELFORMAT_RGBA8SI:
        case SG_PIXELFORMAT_BGRA8:
        case SG_PIXELFORMAT_RGB10A2:
        case SG_PIXELFORMAT_RG11B10F:
        case SG_PIXELFORMAT_RGB9E5:
            return 4;
        case SG_PIXELFORMAT_RG32UI:
        case SG_PIXELFORMAT_RG32SI:
        case SG_PIXELFORMAT_RG32F:
        case SG_PIXELFORMAT_RGBA16:
        case SG_PIXELFORMAT_RGBA16SN:
        case SG_PIXELFORMAT_RGBA16UI:
        case SG_PIXELFORMAT_RGBA16SI:
        case SG_PIXELFORMAT_RGBA16F:
            return 8;
        case SG_PIXELFORMAT_RGBA32UI:
        case SG_PIXELFORMAT_RGBA32SI:
        case SG_PIXELFORMAT_RGBA32F:
            return 16;
        case SG_PIXELFORMAT_DEPTH:
        case SG_PIXELFORMAT_DEPTH_STENCIL:
            return 4;
        default:
            SOKOL_UNREACHABLE;
            return 0;
    }
}
// return the texture block width/height of an image format
_SOKOL_PRIVATE int _sg_block_dim(sg_pixel_format fmt) {
    if (_sg_is_compressed_pixel_format(fmt)) {
        return 4;
    } else {
        return 1;
    }
}
// return texture block size in bytes
_SOKOL_PRIVATE int _sg_block_bytesize(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_BC1_RGBA:
        case SG_PIXELFORMAT_BC4_R:
        case SG_PIXELFORMAT_BC4_RSN:
        case SG_PIXELFORMAT_ETC2_RGB8:
        case SG_PIXELFORMAT_ETC2_SRGB8:
        case SG_PIXELFORMAT_ETC2_RGB8A1:
        case SG_PIXELFORMAT_EAC_R11:
        case SG_PIXELFORMAT_EAC_R11SN:
            return 8;
        case SG_PIXELFORMAT_BC2_RGBA:
        case SG_PIXELFORMAT_BC3_RGBA:
        case SG_PIXELFORMAT_BC3_SRGBA:
        case SG_PIXELFORMAT_BC5_RG:
        case SG_PIXELFORMAT_BC5_RGSN:
        case SG_PIXELFORMAT_BC6H_RGBF:
        case SG_PIXELFORMAT_BC6H_RGBUF:
        case SG_PIXELFORMAT_BC7_RGBA:
        case SG_PIXELFORMAT_BC7_SRGBA:
        case SG_PIXELFORMAT_ETC2_RGBA8:
        case SG_PIXELFORMAT_ETC2_SRGB8A8:
        case SG_PIXELFORMAT_EAC_RG11:
        case SG_PIXELFORMAT_EAC_RG11SN:
        case SG_PIXELFORMAT_ASTC_4x4_RGBA:
        case SG_PIXELFORMAT_ASTC_4x4_SRGBA:
            return 16;
        default:
            return _sg_pixelformat_bytesize(fmt);
    }
}
/* return row pitch for an image
    see ComputePitch in https://github.com/microsoft/DirectXTex/blob/master/DirectXTex/DirectXTexUtil.cpp
*/
_SOKOL_PRIVATE int _sg_row_pitch(sg_pixel_format fmt, int width, int row_align) {
    const int block_dim = _sg_block_dim(fmt);
    const int num_blocks_in_row = (width + (block_dim-1)) / block_dim;
    const int block_num_bytes = _sg_block_bytesize(fmt);
    int pitch = num_blocks_in_row * block_num_bytes;
    pitch = (pitch < block_num_bytes) ? block_num_bytes : pitch;
    pitch = _sg_roundup(pitch, row_align);
    return pitch;
}
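// Worked example (illustrative, not part of the original source): for
// SG_PIXELFORMAT_BC1_RGBA (block_dim 4, block size 8 bytes) and width 256
// with row_align 1, a row covers 256 / 4 = 64 blocks, so the row pitch is
// 64 * 8 = 512 bytes.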
// compute the number of rows in a surface depending on pixel format
_SOKOL_PRIVATE int _sg_num_rows(sg_pixel_format fmt, int height) {
    const int block_dim = _sg_block_dim(fmt);
    int num_rows = (height + (block_dim-1)) / block_dim;
    if (num_rows < 1) {
        num_rows = 1;
    }
    return num_rows;
}
/* return pitch of a 2D subimage / texture slice
    see ComputePitch in https://github.com/microsoft/DirectXTex/blob/master/DirectXTex/DirectXTexUtil.cpp
*/
_SOKOL_PRIVATE int _sg_surface_pitch(sg_pixel_format fmt, int width, int height, int row_align) {
    int num_rows = _sg_num_rows(fmt, height);
    return num_rows * _sg_row_pitch(fmt, width, row_align);
}
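// Continuing the BC1 example above (illustrative, not part of the original
// source): a 256x256 BC1 image has 256 / 4 = 64 block rows, so the surface
// pitch is 64 * 512 = 32768 bytes, i.e. 0.5 bytes per pixel as expected for
// a 4x4-block format with 8-byte blocks.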
// capability table pixel format helper functions
_SOKOL_PRIVATE void _sg_pixelformat_all(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->filter = true;
    pfi->blend = true;
    pfi->render = true;
    pfi->msaa = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_s(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_sf(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->filter = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_sr(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->render = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_sfr(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->filter = true;
    pfi->render = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_srmd(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->render = true;
    pfi->msaa = true;
    pfi->depth = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_srm(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->render = true;
    pfi->msaa = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_sfrm(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->filter = true;
    pfi->render = true;
    pfi->msaa = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_sbrm(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->blend = true;
    pfi->render = true;
    pfi->msaa = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_sbr(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->blend = true;
    pfi->render = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_sfbr(_sg_pixelformat_info_t* pfi) {
    pfi->sample = true;
    pfi->filter = true;
    pfi->blend = true;
    pfi->render = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_compute_all(_sg_pixelformat_info_t* pfi) {
    pfi->read = true;
    pfi->write = true;
}
_SOKOL_PRIVATE void _sg_pixelformat_compute_writeonly(_sg_pixelformat_info_t* pfi) {
    pfi->read = false;
    pfi->write = true;
}
_SOKOL_PRIVATE sg_pass_action _sg_pass_action_defaults(const sg_pass_action* action) {
    SOKOL_ASSERT(action);
    sg_pass_action res = *action;
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        if (res.colors[i].load_action == _SG_LOADACTION_DEFAULT) {
            res.colors[i].load_action = SG_LOADACTION_CLEAR;
            res.colors[i].clear_value.r = SG_DEFAULT_CLEAR_RED;
            res.colors[i].clear_value.g = SG_DEFAULT_CLEAR_GREEN;
            res.colors[i].clear_value.b = SG_DEFAULT_CLEAR_BLUE;
            res.colors[i].clear_value.a = SG_DEFAULT_CLEAR_ALPHA;
        }
        if (res.colors[i].store_action == _SG_STOREACTION_DEFAULT) {
            res.colors[i].store_action = SG_STOREACTION_STORE;
        }
    }
    if (res.depth.load_action == _SG_LOADACTION_DEFAULT) {
        res.depth.load_action = SG_LOADACTION_CLEAR;
        res.depth.clear_value = SG_DEFAULT_CLEAR_DEPTH;
    }
    if (res.depth.store_action == _SG_STOREACTION_DEFAULT) {
        res.depth.store_action = SG_STOREACTION_DONTCARE;
    }
    if (res.stencil.load_action == _SG_LOADACTION_DEFAULT) {
        res.stencil.load_action = SG_LOADACTION_CLEAR;
        res.stencil.clear_value = SG_DEFAULT_CLEAR_STENCIL;
    }
    if (res.stencil.store_action == _SG_STOREACTION_DEFAULT) {
        res.stencil.store_action = SG_STOREACTION_DONTCARE;
    }
    return res;
}
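// Illustrative note (not part of the original source): assuming the
// _SG_LOADACTION_DEFAULT / _SG_STOREACTION_DEFAULT enum values are the
// zero-initialized state, a pass action declared as
//
//      sg_pass_action action = {0};
//
// resolves to: each color attachment cleared to the SG_DEFAULT_CLEAR_* color
// and stored, and depth/stencil cleared to their default clear values with
// store action SG_STOREACTION_DONTCARE.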
  7676. // ██████ ██ ██ ███ ███ ███ ███ ██ ██ ██████ █████ ██████ ██ ██ ███████ ███ ██ ██████
  7677. // ██ ██ ██ ██ ████ ████ ████ ████ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██
  7678. // ██ ██ ██ ██ ██ ████ ██ ██ ████ ██ ████ ██████ ███████ ██ █████ █████ ██ ██ ██ ██ ██
  7679. // ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
  7680. // ██████ ██████ ██ ██ ██ ██ ██ ██████ ██ ██ ██████ ██ ██ ███████ ██ ████ ██████
  7681. //
  7682. // >>dummy backend
  7683. #if defined(SOKOL_DUMMY_BACKEND)
  7684. _SOKOL_PRIVATE void _sg_dummy_setup_backend(const sg_desc* desc) {
  7685. SOKOL_ASSERT(desc);
  7686. _SOKOL_UNUSED(desc);
  7687. _sg.backend = SG_BACKEND_DUMMY;
  7688. for (int i = SG_PIXELFORMAT_R8; i < SG_PIXELFORMAT_BC1_RGBA; i++) {
  7689. _sg.formats[i].sample = true;
  7690. _sg.formats[i].filter = true;
  7691. _sg.formats[i].render = true;
  7692. _sg.formats[i].blend = true;
  7693. _sg.formats[i].msaa = true;
  7694. }
  7695. _sg.formats[SG_PIXELFORMAT_DEPTH].depth = true;
  7696. _sg.formats[SG_PIXELFORMAT_DEPTH_STENCIL].depth = true;
  7697. _sg.limits.max_image_size_2d = 1024;
  7698. _sg.limits.max_image_size_cube = 1024;
  7699. _sg.limits.max_image_size_3d = 1024;
  7700. _sg.limits.max_image_size_array = 1024;
  7701. _sg.limits.max_image_array_layers = 1024;
  7702. _sg.limits.max_vertex_attrs = 16;
  7703. _sg.limits.max_color_attachments = SG_MAX_PORTABLE_COLOR_ATTACHMENTS;
  7704. _sg.limits.max_texture_bindings_per_stage = SG_MAX_PORTABLE_TEXTURE_BINDINGS_PER_STAGE;
  7705. _sg.limits.max_storage_buffer_bindings_per_stage = SG_MAX_PORTABLE_STORAGEBUFFER_BINDINGS_PER_STAGE;
  7706. _sg.limits.max_storage_image_bindings_per_stage = SG_MAX_PORTABLE_STORAGEIMAGE_BINDINGS_PER_STAGE;
  7707. }
  7708. _SOKOL_PRIVATE void _sg_dummy_discard_backend(void) {
  7709. // empty
  7710. }
  7711. _SOKOL_PRIVATE void _sg_dummy_reset_state_cache(void) {
  7712. // empty
  7713. }
  7714. _SOKOL_PRIVATE sg_resource_state _sg_dummy_create_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
  7715. SOKOL_ASSERT(buf && desc);
  7716. _SOKOL_UNUSED(buf);
  7717. _SOKOL_UNUSED(desc);
  7718. return SG_RESOURCESTATE_VALID;
  7719. }
  7720. _SOKOL_PRIVATE void _sg_dummy_discard_buffer(_sg_buffer_t* buf) {
  7721. SOKOL_ASSERT(buf);
  7722. _SOKOL_UNUSED(buf);
  7723. }
  7724. _SOKOL_PRIVATE sg_resource_state _sg_dummy_create_image(_sg_image_t* img, const sg_image_desc* desc) {
  7725. SOKOL_ASSERT(img && desc);
  7726. _SOKOL_UNUSED(img);
  7727. _SOKOL_UNUSED(desc);
  7728. return SG_RESOURCESTATE_VALID;
  7729. }
  7730. _SOKOL_PRIVATE void _sg_dummy_discard_image(_sg_image_t* img) {
  7731. SOKOL_ASSERT(img);
  7732. _SOKOL_UNUSED(img);
  7733. }
  7734. _SOKOL_PRIVATE sg_resource_state _sg_dummy_create_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
  7735. SOKOL_ASSERT(smp && desc);
  7736. _SOKOL_UNUSED(smp);
  7737. _SOKOL_UNUSED(desc);
  7738. return SG_RESOURCESTATE_VALID;
  7739. }
  7740. _SOKOL_PRIVATE void _sg_dummy_discard_sampler(_sg_sampler_t* smp) {
  7741. SOKOL_ASSERT(smp);
  7742. _SOKOL_UNUSED(smp);
  7743. }
  7744. _SOKOL_PRIVATE sg_resource_state _sg_dummy_create_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
  7745. SOKOL_ASSERT(shd && desc);
  7746. _SOKOL_UNUSED(shd);
  7747. _SOKOL_UNUSED(desc);
  7748. return SG_RESOURCESTATE_VALID;
  7749. }
  7750. _SOKOL_PRIVATE void _sg_dummy_discard_shader(_sg_shader_t* shd) {
  7751. SOKOL_ASSERT(shd);
  7752. _SOKOL_UNUSED(shd);
  7753. }
  7754. _SOKOL_PRIVATE sg_resource_state _sg_dummy_create_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
  7755. SOKOL_ASSERT(pip && desc);
  7756. _SOKOL_UNUSED(pip);
  7757. _SOKOL_UNUSED(desc);
  7758. return SG_RESOURCESTATE_VALID;
  7759. }
  7760. _SOKOL_PRIVATE void _sg_dummy_discard_pipeline(_sg_pipeline_t* pip) {
  7761. SOKOL_ASSERT(pip);
  7762. _SOKOL_UNUSED(pip);
  7763. }
  7764. _SOKOL_PRIVATE sg_resource_state _sg_dummy_create_view(_sg_view_t* view, const sg_view_desc* desc) {
  7765. SOKOL_ASSERT(view && desc);
  7766. _SOKOL_UNUSED(view);
  7767. _SOKOL_UNUSED(desc);
  7768. return SG_RESOURCESTATE_VALID;
  7769. }
  7770. _SOKOL_PRIVATE void _sg_dummy_discard_view(_sg_view_t* view) {
  7771. SOKOL_ASSERT(view);
  7772. _SOKOL_UNUSED(view);
  7773. }
  7774. _SOKOL_PRIVATE void _sg_dummy_begin_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
  7775. SOKOL_ASSERT(pass && atts);
  7776. _SOKOL_UNUSED(pass);
  7777. _SOKOL_UNUSED(atts);
  7778. }
  7779. _SOKOL_PRIVATE void _sg_dummy_end_pass(const _sg_attachments_ptrs_t* atts) {
  7780. SOKOL_ASSERT(atts);
  7781. _SOKOL_UNUSED(atts);
  7782. }
  7783. _SOKOL_PRIVATE void _sg_dummy_commit(void) {
  7784. // empty
  7785. }
  7786. _SOKOL_PRIVATE void _sg_dummy_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
  7787. _SOKOL_UNUSED(x);
  7788. _SOKOL_UNUSED(y);
  7789. _SOKOL_UNUSED(w);
  7790. _SOKOL_UNUSED(h);
  7791. _SOKOL_UNUSED(origin_top_left);
  7792. }
  7793. _SOKOL_PRIVATE void _sg_dummy_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
  7794. _SOKOL_UNUSED(x);
  7795. _SOKOL_UNUSED(y);
  7796. _SOKOL_UNUSED(w);
  7797. _SOKOL_UNUSED(h);
  7798. _SOKOL_UNUSED(origin_top_left);
  7799. }
  7800. _SOKOL_PRIVATE void _sg_dummy_apply_pipeline(_sg_pipeline_t* pip) {
  7801. SOKOL_ASSERT(pip);
  7802. _SOKOL_UNUSED(pip);
  7803. }
  7804. _SOKOL_PRIVATE bool _sg_dummy_apply_bindings(_sg_bindings_ptrs_t* bnd) {
  7805. SOKOL_ASSERT(bnd);
  7806. SOKOL_ASSERT(bnd->pip);
  7807. _SOKOL_UNUSED(bnd);
  7808. return true;
  7809. }
  7810. _SOKOL_PRIVATE void _sg_dummy_apply_uniforms(int ub_slot, const sg_range* data) {
  7811. _SOKOL_UNUSED(ub_slot);
  7812. _SOKOL_UNUSED(data);
  7813. }
  7814. _SOKOL_PRIVATE void _sg_dummy_draw(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
  7815. _SOKOL_UNUSED(base_element);
  7816. _SOKOL_UNUSED(num_elements);
  7817. _SOKOL_UNUSED(num_instances);
  7818. _SOKOL_UNUSED(base_vertex);
  7819. _SOKOL_UNUSED(base_instance);
  7820. }
  7821. _SOKOL_PRIVATE void _sg_dummy_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
  7822. _SOKOL_UNUSED(num_groups_x);
  7823. _SOKOL_UNUSED(num_groups_y);
  7824. _SOKOL_UNUSED(num_groups_z);
  7825. }
  7826. _SOKOL_PRIVATE void _sg_dummy_update_buffer(_sg_buffer_t* buf, const sg_range* data) {
  7827. SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
  7828. _SOKOL_UNUSED(data);
  7829. if (++buf->cmn.active_slot >= buf->cmn.num_slots) {
  7830. buf->cmn.active_slot = 0;
  7831. }
  7832. }
  7833. _SOKOL_PRIVATE bool _sg_dummy_append_buffer(_sg_buffer_t* buf, const sg_range* data, bool new_frame) {
  7834. SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
  7835. _SOKOL_UNUSED(data);
  7836. if (new_frame) {
  7837. if (++buf->cmn.active_slot >= buf->cmn.num_slots) {
  7838. buf->cmn.active_slot = 0;
  7839. }
  7840. }
  7841. return true;
  7842. }
  7843. _SOKOL_PRIVATE void _sg_dummy_update_image(_sg_image_t* img, const sg_image_data* data) {
  7844. SOKOL_ASSERT(img && data);
  7845. _SOKOL_UNUSED(data);
  7846. if (++img->cmn.active_slot >= img->cmn.num_slots) {
  7847. img->cmn.active_slot = 0;
  7848. }
  7849. }
  7850. // ██████ ██████ ███████ ███ ██ ██████ ██ ██████ █████ ██████ ██ ██ ███████ ███ ██ ██████
  7851. // ██ ██ ██ ██ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██
  7852. // ██ ██ ██████ █████ ██ ██ ██ ██ ███ ██ ██████ ███████ ██ █████ █████ ██ ██ ██ ██ ██
  7853. // ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
  7854. // ██████ ██ ███████ ██ ████ ██████ ███████ ██████ ██ ██ ██████ ██ ██ ███████ ██ ████ ██████
  7855. //
  7856. // >>opengl backend
  7857. #elif defined(_SOKOL_ANY_GL)
  7858. // optional GL loader for win32
  7859. #if defined(_SOKOL_USE_WIN32_GL_LOADER)
  7860. #ifndef SG_GL_FUNCS_EXT
  7861. #define SG_GL_FUNCS_EXT
  7862. #endif
  7863. // X Macro list of GL function names and signatures
  7864. #define _SG_GL_FUNCS \
  7865. SG_GL_FUNCS_EXT \
  7866. _SG_XMACRO(glBindVertexArray, void, (GLuint array)) \
  7867. _SG_XMACRO(glFramebufferTextureLayer, void, (GLenum target, GLenum attachment, GLuint texture, GLint level, GLint layer)) \
  7868. _SG_XMACRO(glGenFramebuffers, void, (GLsizei n, GLuint * framebuffers)) \
  7869. _SG_XMACRO(glBindFramebuffer, void, (GLenum target, GLuint framebuffer)) \
  7870. _SG_XMACRO(glBindRenderbuffer, void, (GLenum target, GLuint renderbuffer)) \
  7871. _SG_XMACRO(glGetStringi, const GLubyte *, (GLenum name, GLuint index)) \
  7872. _SG_XMACRO(glClearBufferfi, void, (GLenum buffer, GLint drawbuffer, GLfloat depth, GLint stencil)) \
  7873. _SG_XMACRO(glClearBufferfv, void, (GLenum buffer, GLint drawbuffer, const GLfloat * value)) \
  7874. _SG_XMACRO(glClearBufferuiv, void, (GLenum buffer, GLint drawbuffer, const GLuint * value)) \
  7875. _SG_XMACRO(glClearBufferiv, void, (GLenum buffer, GLint drawbuffer, const GLint * value)) \
  7876. _SG_XMACRO(glDeleteRenderbuffers, void, (GLsizei n, const GLuint * renderbuffers)) \
  7877. _SG_XMACRO(glUniform1fv, void, (GLint location, GLsizei count, const GLfloat * value)) \
  7878. _SG_XMACRO(glUniform2fv, void, (GLint location, GLsizei count, const GLfloat * value)) \
  7879. _SG_XMACRO(glUniform3fv, void, (GLint location, GLsizei count, const GLfloat * value)) \
  7880. _SG_XMACRO(glUniform4fv, void, (GLint location, GLsizei count, const GLfloat * value)) \
  7881. _SG_XMACRO(glUniform1iv, void, (GLint location, GLsizei count, const GLint * value)) \
  7882. _SG_XMACRO(glUniform2iv, void, (GLint location, GLsizei count, const GLint * value)) \
  7883. _SG_XMACRO(glUniform3iv, void, (GLint location, GLsizei count, const GLint * value)) \
  7884. _SG_XMACRO(glUniform4iv, void, (GLint location, GLsizei count, const GLint * value)) \
  7885. _SG_XMACRO(glUniformMatrix4fv, void, (GLint location, GLsizei count, GLboolean transpose, const GLfloat * value)) \
  7886. _SG_XMACRO(glUseProgram, void, (GLuint program)) \
  7887. _SG_XMACRO(glShaderSource, void, (GLuint shader, GLsizei count, const GLchar *const* string, const GLint * length)) \
  7888. _SG_XMACRO(glLinkProgram, void, (GLuint program)) \
  7889. _SG_XMACRO(glGetUniformLocation, GLint, (GLuint program, const GLchar * name)) \
  7890. _SG_XMACRO(glGetShaderiv, void, (GLuint shader, GLenum pname, GLint * params)) \
  7891. _SG_XMACRO(glGetProgramInfoLog, void, (GLuint program, GLsizei bufSize, GLsizei * length, GLchar * infoLog)) \
  7892. _SG_XMACRO(glGetAttribLocation, GLint, (GLuint program, const GLchar * name)) \
  7893. _SG_XMACRO(glDisableVertexAttribArray, void, (GLuint index)) \
  7894. _SG_XMACRO(glDeleteShader, void, (GLuint shader)) \
  7895. _SG_XMACRO(glDeleteProgram, void, (GLuint program)) \
  7896. _SG_XMACRO(glCompileShader, void, (GLuint shader)) \
  7897. _SG_XMACRO(glStencilFuncSeparate, void, (GLenum face, GLenum func, GLint ref, GLuint mask)) \
  7898. _SG_XMACRO(glStencilOpSeparate, void, (GLenum face, GLenum sfail, GLenum dpfail, GLenum dppass)) \
  7899. _SG_XMACRO(glRenderbufferStorageMultisample, void, (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height)) \
  7900. _SG_XMACRO(glDrawBuffers, void, (GLsizei n, const GLenum * bufs)) \
  7901. _SG_XMACRO(glVertexAttribDivisor, void, (GLuint index, GLuint divisor)) \
  7902. _SG_XMACRO(glBufferSubData, void, (GLenum target, GLintptr offset, GLsizeiptr size, const void * data)) \
  7903. _SG_XMACRO(glGenBuffers, void, (GLsizei n, GLuint * buffers)) \
  7904. _SG_XMACRO(glCheckFramebufferStatus, GLenum, (GLenum target)) \
  7905. _SG_XMACRO(glFramebufferRenderbuffer, void, (GLenum target, GLenum attachment, GLenum renderbuffertarget, GLuint renderbuffer)) \
  7906. _SG_XMACRO(glCompressedTexImage2D, void, (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLint border, GLsizei imageSize, const void * data)) \
  7907. _SG_XMACRO(glCompressedTexImage3D, void, (GLenum target, GLint level, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLsizei imageSize, const void * data)) \
  7908. _SG_XMACRO(glActiveTexture, void, (GLenum texture)) \
  7909. _SG_XMACRO(glTexSubImage3D, void, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLenum type, const void * pixels)) \
  7910. _SG_XMACRO(glRenderbufferStorage, void, (GLenum target, GLenum internalformat, GLsizei width, GLsizei height)) \
  7911. _SG_XMACRO(glGenTextures, void, (GLsizei n, GLuint * textures)) \
  7912. _SG_XMACRO(glPolygonOffset, void, (GLfloat factor, GLfloat units)) \
  7913. _SG_XMACRO(glDrawElements, void, (GLenum mode, GLsizei count, GLenum type, const void * indices)) \
  7914. _SG_XMACRO(glDeleteFramebuffers, void, (GLsizei n, const GLuint * framebuffers)) \
  7915. _SG_XMACRO(glBlendEquationSeparate, void, (GLenum modeRGB, GLenum modeAlpha)) \
  7916. _SG_XMACRO(glDeleteTextures, void, (GLsizei n, const GLuint * textures)) \
  7917. _SG_XMACRO(glGetProgramiv, void, (GLuint program, GLenum pname, GLint * params)) \
  7918. _SG_XMACRO(glBindTexture, void, (GLenum target, GLuint texture)) \
  7919. _SG_XMACRO(glTexImage3D, void, (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLsizei depth, GLint border, GLenum format, GLenum type, const void * pixels)) \
  7920. _SG_XMACRO(glCreateShader, GLuint, (GLenum type)) \
  7921. _SG_XMACRO(glTexSubImage2D, void, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLenum type, const void * pixels)) \
  7922. _SG_XMACRO(glFramebufferTexture2D, void, (GLenum target, GLenum attachment, GLenum textarget, GLuint texture, GLint level)) \
  7923. _SG_XMACRO(glCreateProgram, GLuint, (void)) \
  7924. _SG_XMACRO(glViewport, void, (GLint x, GLint y, GLsizei width, GLsizei height)) \
  7925. _SG_XMACRO(glDeleteBuffers, void, (GLsizei n, const GLuint * buffers)) \
  7926. _SG_XMACRO(glDrawArrays, void, (GLenum mode, GLint first, GLsizei count)) \
  7927. _SG_XMACRO(glDrawElementsInstanced, void, (GLenum mode, GLsizei count, GLenum type, const void * indices, GLsizei instancecount)) \
  7928. _SG_XMACRO(glVertexAttribPointer, void, (GLuint index, GLint size, GLenum type, GLboolean normalized, GLsizei stride, const void * pointer)) \
  7929. _SG_XMACRO(glVertexAttribIPointer, void, (GLuint index, GLint size, GLenum type, GLsizei stride, const void * pointer)) \
  7930. _SG_XMACRO(glUniform1i, void, (GLint location, GLint v0)) \
  7931. _SG_XMACRO(glDisable, void, (GLenum cap)) \
  7932. _SG_XMACRO(glColorMask, void, (GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha)) \
  7933. _SG_XMACRO(glColorMaski, void, (GLuint buf, GLboolean red, GLboolean green, GLboolean blue, GLboolean alpha)) \
  7934. _SG_XMACRO(glBindBuffer, void, (GLenum target, GLuint buffer)) \
  7935. _SG_XMACRO(glDeleteVertexArrays, void, (GLsizei n, const GLuint * arrays)) \
  7936. _SG_XMACRO(glDepthMask, void, (GLboolean flag)) \
  7937. _SG_XMACRO(glDrawArraysInstanced, void, (GLenum mode, GLint first, GLsizei count, GLsizei instancecount)) \
  7938. _SG_XMACRO(glScissor, void, (GLint x, GLint y, GLsizei width, GLsizei height)) \
  7939. _SG_XMACRO(glGenRenderbuffers, void, (GLsizei n, GLuint * renderbuffers)) \
  7940. _SG_XMACRO(glBufferData, void, (GLenum target, GLsizeiptr size, const void * data, GLenum usage)) \
  7941. _SG_XMACRO(glBlendFuncSeparate, void, (GLenum sfactorRGB, GLenum dfactorRGB, GLenum sfactorAlpha, GLenum dfactorAlpha)) \
  7942. _SG_XMACRO(glTexParameteri, void, (GLenum target, GLenum pname, GLint param)) \
  7943. _SG_XMACRO(glGetIntegerv, void, (GLenum pname, GLint * data)) \
  7944. _SG_XMACRO(glEnable, void, (GLenum cap)) \
  7945. _SG_XMACRO(glBlitFramebuffer, void, (GLint srcX0, GLint srcY0, GLint srcX1, GLint srcY1, GLint dstX0, GLint dstY0, GLint dstX1, GLint dstY1, GLbitfield mask, GLenum filter)) \
  7946. _SG_XMACRO(glStencilMask, void, (GLuint mask)) \
  7947. _SG_XMACRO(glAttachShader, void, (GLuint program, GLuint shader)) \
  7948. _SG_XMACRO(glGetError, GLenum, (void)) \
  7949. _SG_XMACRO(glBlendColor, void, (GLfloat red, GLfloat green, GLfloat blue, GLfloat alpha)) \
  7950. _SG_XMACRO(glTexParameterf, void, (GLenum target, GLenum pname, GLfloat param)) \
  7951. _SG_XMACRO(glTexParameterfv, void, (GLenum target, GLenum pname, const GLfloat* params)) \
  7952. _SG_XMACRO(glGetShaderInfoLog, void, (GLuint shader, GLsizei bufSize, GLsizei * length, GLchar * infoLog)) \
  7953. _SG_XMACRO(glDepthFunc, void, (GLenum func)) \
  7954. _SG_XMACRO(glStencilOp , void, (GLenum fail, GLenum zfail, GLenum zpass)) \
  7955. _SG_XMACRO(glStencilFunc, void, (GLenum func, GLint ref, GLuint mask)) \
  7956. _SG_XMACRO(glEnableVertexAttribArray, void, (GLuint index)) \
  7957. _SG_XMACRO(glBlendFunc, void, (GLenum sfactor, GLenum dfactor)) \
  7958. _SG_XMACRO(glReadBuffer, void, (GLenum src)) \
  7959. _SG_XMACRO(glTexImage2D, void, (GLenum target, GLint level, GLint internalformat, GLsizei width, GLsizei height, GLint border, GLenum format, GLenum type, const void * pixels)) \
  7960. _SG_XMACRO(glGenVertexArrays, void, (GLsizei n, GLuint * arrays)) \
  7961. _SG_XMACRO(glFrontFace, void, (GLenum mode)) \
  7962. _SG_XMACRO(glCullFace, void, (GLenum mode)) \
  7963. _SG_XMACRO(glPixelStorei, void, (GLenum pname, GLint param)) \
  7964. _SG_XMACRO(glBindSampler, void, (GLuint unit, GLuint sampler)) \
  7965. _SG_XMACRO(glGenSamplers, void, (GLsizei n, GLuint* samplers)) \
  7966. _SG_XMACRO(glSamplerParameteri, void, (GLuint sampler, GLenum pname, GLint param)) \
  7967. _SG_XMACRO(glSamplerParameterf, void, (GLuint sampler, GLenum pname, GLfloat param)) \
  7968. _SG_XMACRO(glSamplerParameterfv, void, (GLuint sampler, GLenum pname, const GLfloat* params)) \
  7969. _SG_XMACRO(glDeleteSamplers, void, (GLsizei n, const GLuint* samplers)) \
  7970. _SG_XMACRO(glBindBufferBase, void, (GLenum target, GLuint index, GLuint buffer)) \
  7971. _SG_XMACRO(glBindBufferRange, void, (GLenum target, GLuint index, GLuint buffer, GLintptr offset, GLsizeiptr size)) \
  7972. _SG_XMACRO(glTexImage2DMultisample, void, (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations)) \
  7973. _SG_XMACRO(glTexImage3DMultisample, void, (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations)) \
  7974. _SG_XMACRO(glDispatchCompute, void, (GLuint num_groups_x, GLuint num_groups_y, GLuint num_groups_z)) \
  7975. _SG_XMACRO(glMemoryBarrier, void, (GLbitfield barriers)) \
  7976. _SG_XMACRO(glBindImageTexture, void, (GLuint unit, GLuint texture, GLint level, GLboolean layered, GLint layer, GLenum access, GLenum format)) \
  7977. _SG_XMACRO(glTexStorage2DMultisample, void, (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLboolean fixedsamplelocations)) \
  7978. _SG_XMACRO(glTexStorage2D, void, (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height)) \
  7979. _SG_XMACRO(glTexStorage3DMultisample, void, (GLenum target, GLsizei samples, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth, GLboolean fixedsamplelocations)) \
  7980. _SG_XMACRO(glTexStorage3D, void, (GLenum target, GLsizei levels, GLenum internalformat, GLsizei width, GLsizei height, GLsizei depth)) \
  7981. _SG_XMACRO(glCompressedTexSubImage2D, void, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLsizei width, GLsizei height, GLenum format, GLsizei imageSize, const void *data)) \
  7982. _SG_XMACRO(glCompressedTexSubImage3D, void, (GLenum target, GLint level, GLint xoffset, GLint yoffset, GLint zoffset, GLsizei width, GLsizei height, GLsizei depth, GLenum format, GLsizei imageSize, const void *data)) \
  7983. _SG_XMACRO(glTextureView, void, (GLuint texture, GLenum target, GLuint origtexture, GLenum internalformat, GLuint minlevel, GLuint numlevels, GLuint minlayer, GLuint numlayers)) \
  7984. _SG_XMACRO(glDrawElementsBaseVertex, void, (GLenum mode, GLsizei count, GLenum type, const void* indices, GLint basevertex)) \
  7985. _SG_XMACRO(glDrawElementsInstancedBaseVertex, void, (GLenum mode, GLsizei count, GLenum type, const void* indices, GLsizei instancecount, GLint basevertex)) \
  7986. _SG_XMACRO(glDrawElementsInstancedBaseVertexBaseInstance, void, (GLenum mode, GLsizei count, GLenum type, const void* indices, GLsizei instancecount, GLint basevertex, GLuint baseinstance)) \
  7987. _SG_XMACRO(glDrawArraysInstancedBaseInstance, void, (GLenum mode, GLint first, GLsizei count, GLsizei instancecount, GLuint baseinstance))
  7988. // generate GL function pointer typedefs
  7989. #define _SG_XMACRO(name, ret, args) typedef ret (GL_APIENTRY* PFN_ ## name) args;
  7990. _SG_GL_FUNCS
  7991. #undef _SG_XMACRO
  7992. // generate GL function pointers
  7993. #define _SG_XMACRO(name, ret, args) static PFN_ ## name name;
  7994. _SG_GL_FUNCS
  7995. #undef _SG_XMACRO
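// Illustrative expansion (not part of the original source): for the
// glBindVertexArray entry in the list above, the two _SG_XMACRO expansions
// generate
//
//      typedef void (GL_APIENTRY* PFN_glBindVertexArray)(GLuint array);
//      static PFN_glBindVertexArray glBindVertexArray;
//
// so every GL call in this backend goes through a function pointer that
// _sg_gl_load_opengl() below resolves at startup.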
// helper function to lookup GL functions in GL DLL
typedef PROC (WINAPI * _sg_wglGetProcAddress)(LPCSTR);
_SOKOL_PRIVATE void* _sg_gl_getprocaddr(const char* name, _sg_wglGetProcAddress wgl_getprocaddress) {
    void* proc_addr = (void*) wgl_getprocaddress(name);
    if (0 == proc_addr) {
        proc_addr = (void*) GetProcAddress(_sg.gl.opengl32_dll, name);
    }
    SOKOL_ASSERT(proc_addr);
    return proc_addr;
}
// populate GL function pointers
_SOKOL_PRIVATE void _sg_gl_load_opengl(void) {
    SOKOL_ASSERT(0 == _sg.gl.opengl32_dll);
    _sg.gl.opengl32_dll = LoadLibraryA("opengl32.dll");
    SOKOL_ASSERT(_sg.gl.opengl32_dll);
    _sg_wglGetProcAddress wgl_getprocaddress = (_sg_wglGetProcAddress) GetProcAddress(_sg.gl.opengl32_dll, "wglGetProcAddress");
    SOKOL_ASSERT(wgl_getprocaddress);
    #define _SG_XMACRO(name, ret, args) name = (PFN_ ## name) _sg_gl_getprocaddr(#name, wgl_getprocaddress);
    _SG_GL_FUNCS
    #undef _SG_XMACRO
}
_SOKOL_PRIVATE void _sg_gl_unload_opengl(void) {
    SOKOL_ASSERT(_sg.gl.opengl32_dll);
    FreeLibrary(_sg.gl.opengl32_dll);
    _sg.gl.opengl32_dll = 0;
}
#endif // _SOKOL_USE_WIN32_GL_LOADER
//-- type translation ----------------------------------------------------------
_SOKOL_PRIVATE GLenum _sg_gl_buffer_target(const sg_buffer_usage* usg) {
    // NOTE: the buffer target returned here is only used as the bind point
    // for copying data into the buffer. Except on WebGL2, the bind point
    // doesn't need to match the later usage of the buffer, but because of
    // the WebGL2 restriction we cannot simply pick an arbitrary bind point
    // (in WebGL2 a buffer cannot 'switch' bind points later).
    if (usg->vertex_buffer) {
        return GL_ARRAY_BUFFER;
    } else if (usg->index_buffer) {
        return GL_ELEMENT_ARRAY_BUFFER;
    } else if (usg->storage_buffer) {
        return GL_SHADER_STORAGE_BUFFER;
    } else {
        SOKOL_UNREACHABLE; return 0;
    }
}
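// Illustrative usage (not part of the original source): a buffer created with
//
//      sg_buffer_usage usg = { .vertex_buffer = true };
//
// always maps to GL_ARRAY_BUFFER here, so its bind point stays stable across
// updates, which satisfies the WebGL2 restriction described above.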
_SOKOL_PRIVATE GLenum _sg_gl_texture_target(sg_image_type t, int sample_count) {
    #if defined(SOKOL_GLCORE)
    const bool msaa = sample_count > 1;
    if (msaa) {
        switch (t) {
            case SG_IMAGETYPE_2D: return GL_TEXTURE_2D_MULTISAMPLE;
            case SG_IMAGETYPE_ARRAY: return GL_TEXTURE_2D_MULTISAMPLE_ARRAY;
            default: SOKOL_UNREACHABLE; return 0;
        }
    } else {
        switch (t) {
            case SG_IMAGETYPE_2D: return GL_TEXTURE_2D;
            case SG_IMAGETYPE_CUBE: return GL_TEXTURE_CUBE_MAP;
            case SG_IMAGETYPE_3D: return GL_TEXTURE_3D;
            case SG_IMAGETYPE_ARRAY: return GL_TEXTURE_2D_ARRAY;
            default: SOKOL_UNREACHABLE; return 0;
        }
    }
    #else
    SOKOL_ASSERT(sample_count == 1); _SOKOL_UNUSED(sample_count);
    switch (t) {
        case SG_IMAGETYPE_2D: return GL_TEXTURE_2D;
        case SG_IMAGETYPE_CUBE: return GL_TEXTURE_CUBE_MAP;
        case SG_IMAGETYPE_3D: return GL_TEXTURE_3D;
        case SG_IMAGETYPE_ARRAY: return GL_TEXTURE_2D_ARRAY;
        default: SOKOL_UNREACHABLE; return 0;
    }
    #endif
}
_SOKOL_PRIVATE GLenum _sg_gl_buffer_usage(const sg_buffer_usage* usg) {
    if (usg->immutable) {
        return GL_STATIC_DRAW;
    } else if (usg->dynamic_update) {
        return GL_DYNAMIC_DRAW;
    } else if (usg->stream_update) {
        return GL_STREAM_DRAW;
    } else {
        SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_shader_stage(sg_shader_stage stage) {
    switch (stage) {
        case SG_SHADERSTAGE_VERTEX: return GL_VERTEX_SHADER;
        case SG_SHADERSTAGE_FRAGMENT: return GL_FRAGMENT_SHADER;
        case SG_SHADERSTAGE_COMPUTE: return GL_COMPUTE_SHADER;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLint _sg_gl_vertexformat_size(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT: return 1;
        case SG_VERTEXFORMAT_FLOAT2: return 2;
        case SG_VERTEXFORMAT_FLOAT3: return 3;
        case SG_VERTEXFORMAT_FLOAT4: return 4;
        case SG_VERTEXFORMAT_INT: return 1;
        case SG_VERTEXFORMAT_INT2: return 2;
        case SG_VERTEXFORMAT_INT3: return 3;
        case SG_VERTEXFORMAT_INT4: return 4;
        case SG_VERTEXFORMAT_UINT: return 1;
        case SG_VERTEXFORMAT_UINT2: return 2;
        case SG_VERTEXFORMAT_UINT3: return 3;
        case SG_VERTEXFORMAT_UINT4: return 4;
        case SG_VERTEXFORMAT_BYTE4: return 4;
        case SG_VERTEXFORMAT_BYTE4N: return 4;
        case SG_VERTEXFORMAT_UBYTE4: return 4;
        case SG_VERTEXFORMAT_UBYTE4N: return 4;
        case SG_VERTEXFORMAT_SHORT2: return 2;
        case SG_VERTEXFORMAT_SHORT2N: return 2;
        case SG_VERTEXFORMAT_USHORT2: return 2;
        case SG_VERTEXFORMAT_USHORT2N: return 2;
        case SG_VERTEXFORMAT_SHORT4: return 4;
        case SG_VERTEXFORMAT_SHORT4N: return 4;
        case SG_VERTEXFORMAT_USHORT4: return 4;
        case SG_VERTEXFORMAT_USHORT4N: return 4;
        case SG_VERTEXFORMAT_UINT10_N2: return 4;
        case SG_VERTEXFORMAT_HALF2: return 2;
        case SG_VERTEXFORMAT_HALF4: return 4;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_vertexformat_type(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT:
        case SG_VERTEXFORMAT_FLOAT2:
        case SG_VERTEXFORMAT_FLOAT3:
        case SG_VERTEXFORMAT_FLOAT4:
            return GL_FLOAT;
        case SG_VERTEXFORMAT_INT:
        case SG_VERTEXFORMAT_INT2:
        case SG_VERTEXFORMAT_INT3:
        case SG_VERTEXFORMAT_INT4:
            return GL_INT;
        case SG_VERTEXFORMAT_UINT:
        case SG_VERTEXFORMAT_UINT2:
        case SG_VERTEXFORMAT_UINT3:
        case SG_VERTEXFORMAT_UINT4:
            return GL_UNSIGNED_INT;
        case SG_VERTEXFORMAT_BYTE4:
        case SG_VERTEXFORMAT_BYTE4N:
            return GL_BYTE;
        case SG_VERTEXFORMAT_UBYTE4:
        case SG_VERTEXFORMAT_UBYTE4N:
            return GL_UNSIGNED_BYTE;
        case SG_VERTEXFORMAT_SHORT2:
        case SG_VERTEXFORMAT_SHORT2N:
        case SG_VERTEXFORMAT_SHORT4:
        case SG_VERTEXFORMAT_SHORT4N:
            return GL_SHORT;
        case SG_VERTEXFORMAT_USHORT2:
        case SG_VERTEXFORMAT_USHORT2N:
        case SG_VERTEXFORMAT_USHORT4:
        case SG_VERTEXFORMAT_USHORT4N:
            return GL_UNSIGNED_SHORT;
        case SG_VERTEXFORMAT_UINT10_N2:
            return GL_UNSIGNED_INT_2_10_10_10_REV;
        case SG_VERTEXFORMAT_HALF2:
        case SG_VERTEXFORMAT_HALF4:
            return GL_HALF_FLOAT;
        default:
            SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLboolean _sg_gl_vertexformat_normalized(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_BYTE4N:
        case SG_VERTEXFORMAT_UBYTE4N:
        case SG_VERTEXFORMAT_SHORT2N:
        case SG_VERTEXFORMAT_USHORT2N:
        case SG_VERTEXFORMAT_SHORT4N:
        case SG_VERTEXFORMAT_USHORT4N:
        case SG_VERTEXFORMAT_UINT10_N2:
            return GL_TRUE;
        default:
            return GL_FALSE;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_primitive_type(sg_primitive_type t) {
    switch (t) {
        case SG_PRIMITIVETYPE_POINTS: return GL_POINTS;
        case SG_PRIMITIVETYPE_LINES: return GL_LINES;
        case SG_PRIMITIVETYPE_LINE_STRIP: return GL_LINE_STRIP;
        case SG_PRIMITIVETYPE_TRIANGLES: return GL_TRIANGLES;
        case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return GL_TRIANGLE_STRIP;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_index_type(sg_index_type t) {
    switch (t) {
        case SG_INDEXTYPE_NONE: return 0;
        case SG_INDEXTYPE_UINT16: return GL_UNSIGNED_SHORT;
        case SG_INDEXTYPE_UINT32: return GL_UNSIGNED_INT;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_compare_func(sg_compare_func cmp) {
    switch (cmp) {
        case SG_COMPAREFUNC_NEVER: return GL_NEVER;
        case SG_COMPAREFUNC_LESS: return GL_LESS;
        case SG_COMPAREFUNC_EQUAL: return GL_EQUAL;
        case SG_COMPAREFUNC_LESS_EQUAL: return GL_LEQUAL;
        case SG_COMPAREFUNC_GREATER: return GL_GREATER;
        case SG_COMPAREFUNC_NOT_EQUAL: return GL_NOTEQUAL;
        case SG_COMPAREFUNC_GREATER_EQUAL: return GL_GEQUAL;
        case SG_COMPAREFUNC_ALWAYS: return GL_ALWAYS;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_stencil_op(sg_stencil_op op) {
    switch (op) {
        case SG_STENCILOP_KEEP: return GL_KEEP;
        case SG_STENCILOP_ZERO: return GL_ZERO;
        case SG_STENCILOP_REPLACE: return GL_REPLACE;
        case SG_STENCILOP_INCR_CLAMP: return GL_INCR;
        case SG_STENCILOP_DECR_CLAMP: return GL_DECR;
        case SG_STENCILOP_INVERT: return GL_INVERT;
        case SG_STENCILOP_INCR_WRAP: return GL_INCR_WRAP;
        case SG_STENCILOP_DECR_WRAP: return GL_DECR_WRAP;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_blend_factor(sg_blend_factor f) {
    switch (f) {
        case SG_BLENDFACTOR_ZERO: return GL_ZERO;
        case SG_BLENDFACTOR_ONE: return GL_ONE;
        case SG_BLENDFACTOR_SRC_COLOR: return GL_SRC_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return GL_ONE_MINUS_SRC_COLOR;
        case SG_BLENDFACTOR_SRC_ALPHA: return GL_SRC_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return GL_ONE_MINUS_SRC_ALPHA;
        case SG_BLENDFACTOR_DST_COLOR: return GL_DST_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return GL_ONE_MINUS_DST_COLOR;
        case SG_BLENDFACTOR_DST_ALPHA: return GL_DST_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return GL_ONE_MINUS_DST_ALPHA;
        case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return GL_SRC_ALPHA_SATURATE;
        case SG_BLENDFACTOR_BLEND_COLOR: return GL_CONSTANT_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return GL_ONE_MINUS_CONSTANT_COLOR;
        case SG_BLENDFACTOR_BLEND_ALPHA: return GL_CONSTANT_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return GL_ONE_MINUS_CONSTANT_ALPHA;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_blend_op(sg_blend_op op) {
    switch (op) {
        case SG_BLENDOP_ADD: return GL_FUNC_ADD;
        case SG_BLENDOP_SUBTRACT: return GL_FUNC_SUBTRACT;
        case SG_BLENDOP_REVERSE_SUBTRACT: return GL_FUNC_REVERSE_SUBTRACT;
        case SG_BLENDOP_MIN: return GL_MIN;
        case SG_BLENDOP_MAX: return GL_MAX;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_min_filter(sg_filter min_f, sg_filter mipmap_f) {
    if (min_f == SG_FILTER_NEAREST) {
        switch (mipmap_f) {
            case SG_FILTER_NEAREST: return GL_NEAREST_MIPMAP_NEAREST;
            case SG_FILTER_LINEAR: return GL_NEAREST_MIPMAP_LINEAR;
            default: SOKOL_UNREACHABLE; return (GLenum)0;
        }
    } else if (min_f == SG_FILTER_LINEAR) {
        switch (mipmap_f) {
            case SG_FILTER_NEAREST: return GL_LINEAR_MIPMAP_NEAREST;
            case SG_FILTER_LINEAR: return GL_LINEAR_MIPMAP_LINEAR;
            default: SOKOL_UNREACHABLE; return (GLenum)0;
        }
    } else {
        SOKOL_UNREACHABLE; return (GLenum)0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_mag_filter(sg_filter mag_f) {
    if (mag_f == SG_FILTER_NEAREST) {
        return GL_NEAREST;
    } else {
        return GL_LINEAR;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_wrap(sg_wrap w) {
    switch (w) {
        case SG_WRAP_CLAMP_TO_EDGE: return GL_CLAMP_TO_EDGE;
        #if defined(SOKOL_GLCORE)
        case SG_WRAP_CLAMP_TO_BORDER: return GL_CLAMP_TO_BORDER;
        #else
        case SG_WRAP_CLAMP_TO_BORDER: return GL_CLAMP_TO_EDGE;
        #endif
        case SG_WRAP_REPEAT: return GL_REPEAT;
        case SG_WRAP_MIRRORED_REPEAT: return GL_MIRRORED_REPEAT;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_teximage_type(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_R8:
        case SG_PIXELFORMAT_R8UI:
        case SG_PIXELFORMAT_RG8:
        case SG_PIXELFORMAT_RG8UI:
        case SG_PIXELFORMAT_RGBA8:
        case SG_PIXELFORMAT_SRGB8A8:
        case SG_PIXELFORMAT_RGBA8UI:
        case SG_PIXELFORMAT_BGRA8:
            return GL_UNSIGNED_BYTE;
        case SG_PIXELFORMAT_R8SN:
        case SG_PIXELFORMAT_R8SI:
        case SG_PIXELFORMAT_RG8SN:
        case SG_PIXELFORMAT_RG8SI:
        case SG_PIXELFORMAT_RGBA8SN:
        case SG_PIXELFORMAT_RGBA8SI:
            return GL_BYTE;
        case SG_PIXELFORMAT_R16:
        case SG_PIXELFORMAT_R16UI:
        case SG_PIXELFORMAT_RG16:
        case SG_PIXELFORMAT_RG16UI:
        case SG_PIXELFORMAT_RGBA16:
        case SG_PIXELFORMAT_RGBA16UI:
            return GL_UNSIGNED_SHORT;
        case SG_PIXELFORMAT_R16SN:
        case SG_PIXELFORMAT_R16SI:
        case SG_PIXELFORMAT_RG16SN:
        case SG_PIXELFORMAT_RG16SI:
        case SG_PIXELFORMAT_RGBA16SN:
        case SG_PIXELFORMAT_RGBA16SI:
            return GL_SHORT;
        case SG_PIXELFORMAT_R16F:
        case SG_PIXELFORMAT_RG16F:
        case SG_PIXELFORMAT_RGBA16F:
            return GL_HALF_FLOAT;
        case SG_PIXELFORMAT_R32UI:
        case SG_PIXELFORMAT_RG32UI:
        case SG_PIXELFORMAT_RGBA32UI:
            return GL_UNSIGNED_INT;
        case SG_PIXELFORMAT_R32SI:
        case SG_PIXELFORMAT_RG32SI:
        case SG_PIXELFORMAT_RGBA32SI:
            return GL_INT;
        case SG_PIXELFORMAT_R32F:
        case SG_PIXELFORMAT_RG32F:
        case SG_PIXELFORMAT_RGBA32F:
            return GL_FLOAT;
        case SG_PIXELFORMAT_RGB10A2:
            return GL_UNSIGNED_INT_2_10_10_10_REV;
        case SG_PIXELFORMAT_RG11B10F:
            return GL_UNSIGNED_INT_10F_11F_11F_REV;
        case SG_PIXELFORMAT_RGB9E5:
            return GL_UNSIGNED_INT_5_9_9_9_REV;
        case SG_PIXELFORMAT_DEPTH:
            return GL_FLOAT;
        case SG_PIXELFORMAT_DEPTH_STENCIL:
            return GL_UNSIGNED_INT_24_8;
        default:
            SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_teximage_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_R8:
        case SG_PIXELFORMAT_R8SN:
        case SG_PIXELFORMAT_R16:
        case SG_PIXELFORMAT_R16SN:
        case SG_PIXELFORMAT_R16F:
        case SG_PIXELFORMAT_R32F:
            return GL_RED;
        case SG_PIXELFORMAT_R8UI:
        case SG_PIXELFORMAT_R8SI:
        case SG_PIXELFORMAT_R16UI:
        case SG_PIXELFORMAT_R16SI:
        case SG_PIXELFORMAT_R32UI:
        case SG_PIXELFORMAT_R32SI:
            return GL_RED_INTEGER;
        case SG_PIXELFORMAT_RG8:
        case SG_PIXELFORMAT_RG8SN:
        case SG_PIXELFORMAT_RG16:
        case SG_PIXELFORMAT_RG16SN:
        case SG_PIXELFORMAT_RG16F:
        case SG_PIXELFORMAT_RG32F:
            return GL_RG;
        case SG_PIXELFORMAT_RG8UI:
        case SG_PIXELFORMAT_RG8SI:
        case SG_PIXELFORMAT_RG16UI:
        case SG_PIXELFORMAT_RG16SI:
        case SG_PIXELFORMAT_RG32UI:
        case SG_PIXELFORMAT_RG32SI:
            return GL_RG_INTEGER;
        case SG_PIXELFORMAT_RGBA8:
        case SG_PIXELFORMAT_SRGB8A8:
        case SG_PIXELFORMAT_RGBA8SN:
        case SG_PIXELFORMAT_RGBA16:
        case SG_PIXELFORMAT_RGBA16SN:
        case SG_PIXELFORMAT_RGBA16F:
        case SG_PIXELFORMAT_RGBA32F:
        case SG_PIXELFORMAT_RGB10A2:
            return GL_RGBA;
        case SG_PIXELFORMAT_RGBA8UI:
        case SG_PIXELFORMAT_RGBA8SI:
        case SG_PIXELFORMAT_RGBA16UI:
        case SG_PIXELFORMAT_RGBA16SI:
        case SG_PIXELFORMAT_RGBA32UI:
        case SG_PIXELFORMAT_RGBA32SI:
            return GL_RGBA_INTEGER;
        case SG_PIXELFORMAT_RG11B10F:
        case SG_PIXELFORMAT_RGB9E5:
            return GL_RGB;
        case SG_PIXELFORMAT_DEPTH:
            return GL_DEPTH_COMPONENT;
        case SG_PIXELFORMAT_DEPTH_STENCIL:
            return GL_DEPTH_STENCIL;
        case SG_PIXELFORMAT_BC1_RGBA:
            return GL_COMPRESSED_RGBA_S3TC_DXT1_EXT;
        case SG_PIXELFORMAT_BC2_RGBA:
            return GL_COMPRESSED_RGBA_S3TC_DXT3_EXT;
        case SG_PIXELFORMAT_BC3_RGBA:
            return GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
        case SG_PIXELFORMAT_BC3_SRGBA:
            return GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT;
        case SG_PIXELFORMAT_BC4_R:
            return GL_COMPRESSED_RED_RGTC1;
        case SG_PIXELFORMAT_BC4_RSN:
            return GL_COMPRESSED_SIGNED_RED_RGTC1;
        case SG_PIXELFORMAT_BC5_RG:
            return GL_COMPRESSED_RED_GREEN_RGTC2;
        case SG_PIXELFORMAT_BC5_RGSN:
            return GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2;
        case SG_PIXELFORMAT_BC6H_RGBF:
            return GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_ARB;
        case SG_PIXELFORMAT_BC6H_RGBUF:
            return GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_ARB;
        case SG_PIXELFORMAT_BC7_RGBA:
            return GL_COMPRESSED_RGBA_BPTC_UNORM_ARB;
        case SG_PIXELFORMAT_BC7_SRGBA:
            return GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_ARB;
        case SG_PIXELFORMAT_ETC2_RGB8:
            return GL_COMPRESSED_RGB8_ETC2;
        case SG_PIXELFORMAT_ETC2_SRGB8:
            return GL_COMPRESSED_SRGB8_ETC2;
        case SG_PIXELFORMAT_ETC2_RGB8A1:
            return GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2;
        case SG_PIXELFORMAT_ETC2_RGBA8:
            return GL_COMPRESSED_RGBA8_ETC2_EAC;
        case SG_PIXELFORMAT_ETC2_SRGB8A8:
            return GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC;
        case SG_PIXELFORMAT_EAC_R11:
            return GL_COMPRESSED_R11_EAC;
        case SG_PIXELFORMAT_EAC_R11SN:
            return GL_COMPRESSED_SIGNED_R11_EAC;
        case SG_PIXELFORMAT_EAC_RG11:
            return GL_COMPRESSED_RG11_EAC;
        case SG_PIXELFORMAT_EAC_RG11SN:
            return GL_COMPRESSED_SIGNED_RG11_EAC;
        case SG_PIXELFORMAT_ASTC_4x4_RGBA:
            return GL_COMPRESSED_RGBA_ASTC_4x4_KHR;
        case SG_PIXELFORMAT_ASTC_4x4_SRGBA:
            return GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR;
        default:
            SOKOL_UNREACHABLE; return 0;
    }
}
  8452. _SOKOL_PRIVATE GLenum _sg_gl_teximage_internal_format(sg_pixel_format fmt) {
  8453. switch (fmt) {
  8454. case SG_PIXELFORMAT_R8: return GL_R8;
  8455. case SG_PIXELFORMAT_R8SN: return GL_R8_SNORM;
  8456. case SG_PIXELFORMAT_R8UI: return GL_R8UI;
  8457. case SG_PIXELFORMAT_R8SI: return GL_R8I;
  8458. #if !defined(SOKOL_GLES3)
  8459. case SG_PIXELFORMAT_R16: return GL_R16;
  8460. case SG_PIXELFORMAT_R16SN: return GL_R16_SNORM;
  8461. #endif
        case SG_PIXELFORMAT_R16UI: return GL_R16UI;
        case SG_PIXELFORMAT_R16SI: return GL_R16I;
        case SG_PIXELFORMAT_R16F: return GL_R16F;
        case SG_PIXELFORMAT_RG8: return GL_RG8;
        case SG_PIXELFORMAT_RG8SN: return GL_RG8_SNORM;
        case SG_PIXELFORMAT_RG8UI: return GL_RG8UI;
        case SG_PIXELFORMAT_RG8SI: return GL_RG8I;
        case SG_PIXELFORMAT_R32UI: return GL_R32UI;
        case SG_PIXELFORMAT_R32SI: return GL_R32I;
        case SG_PIXELFORMAT_R32F: return GL_R32F;
        #if !defined(SOKOL_GLES3)
        case SG_PIXELFORMAT_RG16: return GL_RG16;
        case SG_PIXELFORMAT_RG16SN: return GL_RG16_SNORM;
        #endif
        case SG_PIXELFORMAT_RG16UI: return GL_RG16UI;
        case SG_PIXELFORMAT_RG16SI: return GL_RG16I;
        case SG_PIXELFORMAT_RG16F: return GL_RG16F;
        case SG_PIXELFORMAT_RGBA8: return GL_RGBA8;
        case SG_PIXELFORMAT_SRGB8A8: return GL_SRGB8_ALPHA8;
        case SG_PIXELFORMAT_RGBA8SN: return GL_RGBA8_SNORM;
        case SG_PIXELFORMAT_RGBA8UI: return GL_RGBA8UI;
        case SG_PIXELFORMAT_RGBA8SI: return GL_RGBA8I;
        case SG_PIXELFORMAT_RGB10A2: return GL_RGB10_A2;
        case SG_PIXELFORMAT_RG11B10F: return GL_R11F_G11F_B10F;
        case SG_PIXELFORMAT_RGB9E5: return GL_RGB9_E5;
        case SG_PIXELFORMAT_RG32UI: return GL_RG32UI;
        case SG_PIXELFORMAT_RG32SI: return GL_RG32I;
        case SG_PIXELFORMAT_RG32F: return GL_RG32F;
        #if !defined(SOKOL_GLES3)
        case SG_PIXELFORMAT_RGBA16: return GL_RGBA16;
        case SG_PIXELFORMAT_RGBA16SN: return GL_RGBA16_SNORM;
        #endif
        case SG_PIXELFORMAT_RGBA16UI: return GL_RGBA16UI;
        case SG_PIXELFORMAT_RGBA16SI: return GL_RGBA16I;
        case SG_PIXELFORMAT_RGBA16F: return GL_RGBA16F;
        case SG_PIXELFORMAT_RGBA32UI: return GL_RGBA32UI;
        case SG_PIXELFORMAT_RGBA32SI: return GL_RGBA32I;
        case SG_PIXELFORMAT_RGBA32F: return GL_RGBA32F;
        case SG_PIXELFORMAT_DEPTH: return GL_DEPTH_COMPONENT32F;
        case SG_PIXELFORMAT_DEPTH_STENCIL: return GL_DEPTH24_STENCIL8;
        case SG_PIXELFORMAT_BC1_RGBA: return GL_COMPRESSED_RGBA_S3TC_DXT1_EXT;
        case SG_PIXELFORMAT_BC2_RGBA: return GL_COMPRESSED_RGBA_S3TC_DXT3_EXT;
        case SG_PIXELFORMAT_BC3_RGBA: return GL_COMPRESSED_RGBA_S3TC_DXT5_EXT;
        case SG_PIXELFORMAT_BC3_SRGBA: return GL_COMPRESSED_SRGB_ALPHA_S3TC_DXT5_EXT;
        case SG_PIXELFORMAT_BC4_R: return GL_COMPRESSED_RED_RGTC1;
        case SG_PIXELFORMAT_BC4_RSN: return GL_COMPRESSED_SIGNED_RED_RGTC1;
        case SG_PIXELFORMAT_BC5_RG: return GL_COMPRESSED_RED_GREEN_RGTC2;
        case SG_PIXELFORMAT_BC5_RGSN: return GL_COMPRESSED_SIGNED_RED_GREEN_RGTC2;
        case SG_PIXELFORMAT_BC6H_RGBF: return GL_COMPRESSED_RGB_BPTC_SIGNED_FLOAT_ARB;
        case SG_PIXELFORMAT_BC6H_RGBUF: return GL_COMPRESSED_RGB_BPTC_UNSIGNED_FLOAT_ARB;
        case SG_PIXELFORMAT_BC7_RGBA: return GL_COMPRESSED_RGBA_BPTC_UNORM_ARB;
        case SG_PIXELFORMAT_BC7_SRGBA: return GL_COMPRESSED_SRGB_ALPHA_BPTC_UNORM_ARB;
        case SG_PIXELFORMAT_ETC2_RGB8: return GL_COMPRESSED_RGB8_ETC2;
        case SG_PIXELFORMAT_ETC2_SRGB8: return GL_COMPRESSED_SRGB8_ETC2;
        case SG_PIXELFORMAT_ETC2_RGB8A1: return GL_COMPRESSED_RGB8_PUNCHTHROUGH_ALPHA1_ETC2;
        case SG_PIXELFORMAT_ETC2_RGBA8: return GL_COMPRESSED_RGBA8_ETC2_EAC;
        case SG_PIXELFORMAT_ETC2_SRGB8A8: return GL_COMPRESSED_SRGB8_ALPHA8_ETC2_EAC;
        case SG_PIXELFORMAT_EAC_R11: return GL_COMPRESSED_R11_EAC;
        case SG_PIXELFORMAT_EAC_R11SN: return GL_COMPRESSED_SIGNED_R11_EAC;
        case SG_PIXELFORMAT_EAC_RG11: return GL_COMPRESSED_RG11_EAC;
        case SG_PIXELFORMAT_EAC_RG11SN: return GL_COMPRESSED_SIGNED_RG11_EAC;
        case SG_PIXELFORMAT_ASTC_4x4_RGBA: return GL_COMPRESSED_RGBA_ASTC_4x4_KHR;
        case SG_PIXELFORMAT_ASTC_4x4_SRGBA: return GL_COMPRESSED_SRGB8_ALPHA8_ASTC_4x4_KHR;
        default: SOKOL_UNREACHABLE; return 0;
    }
}

_SOKOL_PRIVATE GLenum _sg_gl_cubeface_target(int face_index) {
    switch (face_index) {
        case 0: return GL_TEXTURE_CUBE_MAP_POSITIVE_X;
        case 1: return GL_TEXTURE_CUBE_MAP_NEGATIVE_X;
        case 2: return GL_TEXTURE_CUBE_MAP_POSITIVE_Y;
        case 3: return GL_TEXTURE_CUBE_MAP_NEGATIVE_Y;
        case 4: return GL_TEXTURE_CUBE_MAP_POSITIVE_Z;
        case 5: return GL_TEXTURE_CUBE_MAP_NEGATIVE_Z;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
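
/* NOTE on the _sg_pixelformat_*() helpers used below (they are defined earlier
   in this file): each suffix letter sets one capability flag on the pixel
   format (s: sample, f: filter, r: render, b: blend, m: msaa, d: depth),
   e.g. _sg_pixelformat_srm() marks a format as sampleable, renderable and
   MSAA-capable, while _sg_pixelformat_all() sets all regular capability flags
*/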
// see: https://www.khronos.org/registry/OpenGL-Refpages/es3.0/html/glTexImage2D.xhtml
_SOKOL_PRIVATE void _sg_gl_init_pixelformats(bool has_bgra) {
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_R8SN]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R8UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R8SI]);
    #if !defined(SOKOL_GLES3)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16SN]);
    #endif
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R16UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R16SI]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG8SN]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG8UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    #if !defined(SOKOL_GLES3)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16SN]);
    #endif
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG16UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG16SI]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_SRGB8A8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    if (has_bgra) {
        _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_BGRA8]);
    }
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGB10A2]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGB9E5]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    #if !defined(SOKOL_GLES3)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16SN]);
    #endif
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH]);
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH_STENCIL]);
}

// FIXME: OES_half_float_blend
_SOKOL_PRIVATE void _sg_gl_init_pixelformats_half_float(bool has_colorbuffer_half_float) {
    if (has_colorbuffer_half_float) {
        _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16F]);
        _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16F]);
        _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    } else {
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_R16F]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG16F]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    }
}
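
/* the capability level of the 32-bit float formats depends on three
   extension-derived flags: whether float render targets are supported
   (e.g. EXT_color_buffer_float), whether float textures can be sampled
   with linear filtering (OES_texture_float_linear), and whether blending
   into float render targets works (EXT_float_blend), see the extension
   scan in _sg_gl_init_caps_gles3() below
*/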
_SOKOL_PRIVATE void _sg_gl_init_pixelformats_float(bool has_colorbuffer_float, bool has_texture_float_linear, bool has_float_blend) {
    if (has_texture_float_linear) {
        if (has_colorbuffer_float) {
            if (has_float_blend) {
                _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R32F]);
                _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG32F]);
                _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
            } else {
                _sg_pixelformat_sfrm(&_sg.formats[SG_PIXELFORMAT_R32F]);
                _sg_pixelformat_sfrm(&_sg.formats[SG_PIXELFORMAT_RG32F]);
                _sg_pixelformat_sfrm(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
            }
            _sg_pixelformat_sfrm(&_sg.formats[SG_PIXELFORMAT_RG11B10F]);
        } else {
            _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_R32F]);
            _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG32F]);
            _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
            _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG11B10F]);
        }
    } else {
        if (has_colorbuffer_float) {
            _sg_pixelformat_sbrm(&_sg.formats[SG_PIXELFORMAT_R32F]);
            _sg_pixelformat_sbrm(&_sg.formats[SG_PIXELFORMAT_RG32F]);
            _sg_pixelformat_sbrm(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
            _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG11B10F]);
        } else {
            _sg_pixelformat_s(&_sg.formats[SG_PIXELFORMAT_R32F]);
            _sg_pixelformat_s(&_sg.formats[SG_PIXELFORMAT_RG32F]);
            _sg_pixelformat_s(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
            _sg_pixelformat_s(&_sg.formats[SG_PIXELFORMAT_RG11B10F]);
        }
    }
}

_SOKOL_PRIVATE void _sg_gl_init_pixelformats_s3tc(void) {
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC1_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC2_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_SRGBA]);
}

_SOKOL_PRIVATE void _sg_gl_init_pixelformats_rgtc(void) {
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_R]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_RSN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RG]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RGSN]);
}

_SOKOL_PRIVATE void _sg_gl_init_pixelformats_bptc(void) {
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBF]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBUF]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_SRGBA]);
}

_SOKOL_PRIVATE void _sg_gl_init_pixelformats_etc2(void) {
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8A1]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGBA8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8A8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11SN]);
}

_SOKOL_PRIVATE void _sg_gl_init_pixelformats_astc(void) {
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_SRGBA]);
}

_SOKOL_PRIVATE void _sg_gl_init_pixelformats_compute(void) {
    // using Vulkan's conservative default caps (see: https://github.com/gpuweb/gpuweb/issues/513)
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
}
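
// NOTE: the queried GL limits are clamped against sokol-gfx's own compile-time
// maximums (SG_MAX_VERTEX_ATTRIBUTES, SG_MAX_COLOR_ATTACHMENTS,
// SG_MAX_VIEW_BINDSLOTS, ...) so that the rest of the backend can work with
// fixed-size arrays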
_SOKOL_PRIVATE void _sg_gl_init_limits(void) {
    _SG_GL_CHECK_ERROR();
    GLint gl_int;
    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.max_image_size_2d = gl_int;
    _sg.limits.max_image_size_array = gl_int;
    glGetIntegerv(GL_MAX_CUBE_MAP_TEXTURE_SIZE, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.max_image_size_cube = gl_int;
    glGetIntegerv(GL_MAX_3D_TEXTURE_SIZE, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.max_image_size_3d = gl_int;
    glGetIntegerv(GL_MAX_ARRAY_TEXTURE_LAYERS, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.max_image_array_layers = gl_int;
    glGetIntegerv(GL_MAX_VERTEX_ATTRIBS, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.max_vertex_attrs = _sg_min(gl_int, SG_MAX_VERTEX_ATTRIBUTES);
    glGetIntegerv(GL_MAX_DRAW_BUFFERS, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.max_color_attachments = _sg_min(gl_int, SG_MAX_COLOR_ATTACHMENTS);
    glGetIntegerv(GL_MAX_TEXTURE_IMAGE_UNITS, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.max_texture_bindings_per_stage = _sg_min(gl_int, SG_MAX_VIEW_BINDSLOTS);
    #if defined(_SOKOL_GL_HAS_COMPUTE)
    if (_sg.features.compute) {
        glGetIntegerv(GL_MAX_SHADER_STORAGE_BUFFER_BINDINGS, &gl_int);
        _SG_GL_CHECK_ERROR();
        _sg.limits.max_storage_buffer_bindings_per_stage = _sg_min(gl_int, SG_MAX_VIEW_BINDSLOTS);
        glGetIntegerv(GL_MAX_IMAGE_UNITS, &gl_int);
        _SG_GL_CHECK_ERROR();
        _sg.limits.max_storage_image_bindings_per_stage = _sg_min(gl_int, SG_MAX_VIEW_BINDSLOTS);
    }
    #endif
    glGetIntegerv(GL_MAX_VERTEX_UNIFORM_COMPONENTS, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.gl_max_vertex_uniform_components = gl_int;
    if (_sg.gl.ext_anisotropic) {
        glGetIntegerv(GL_MAX_TEXTURE_MAX_ANISOTROPY_EXT, &gl_int);
        _SG_GL_CHECK_ERROR();
        _sg.gl.max_anisotropy = gl_int;
    } else {
        _sg.gl.max_anisotropy = 1;
    }
    glGetIntegerv(GL_MAX_COMBINED_TEXTURE_IMAGE_UNITS, &gl_int);
    _SG_GL_CHECK_ERROR();
    _sg.limits.gl_max_combined_texture_image_units = gl_int;
}

#if defined(SOKOL_GLCORE)
_SOKOL_PRIVATE void _sg_gl_init_caps_glcore(void) {
    _sg.backend = SG_BACKEND_GLCORE;
    GLint major_version = 0;
    GLint minor_version = 0;
    glGetIntegerv(GL_MAJOR_VERSION, &major_version);
    glGetIntegerv(GL_MINOR_VERSION, &minor_version);
    const int version = major_version * 100 + minor_version * 10;
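    // (e.g. GL 4.3 yields version == 430, GL 3.3 yields 330)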
    _sg.features.origin_top_left = false;
    _sg.features.image_clamp_to_border = true;
    _sg.features.mrt_independent_blend_state = false;
    _sg.features.mrt_independent_write_mask = true;
    _sg.features.compute = version >= 430;
    _sg.features.gl_texture_views = version >= 430;
    #if defined(__APPLE__)
    _sg.features.msaa_texture_bindings = false;
    #else
    _sg.features.msaa_texture_bindings = true;
    #endif
    _sg.features.draw_base_vertex = version >= 320;
    _sg.features.draw_base_instance = version >= 420;
    // scan extensions
    bool has_s3tc = false;  // BC1..BC3
    bool has_rgtc = false;  // BC4 and BC5
    bool has_bptc = false;  // BC6H and BC7
    bool has_etc2 = false;
    bool has_astc = false;
    GLint num_ext = 0;
    glGetIntegerv(GL_NUM_EXTENSIONS, &num_ext);
    for (int i = 0; i < num_ext; i++) {
        const char* ext = (const char*) glGetStringi(GL_EXTENSIONS, (GLuint)i);
        if (ext) {
            if (strstr(ext, "_texture_compression_s3tc")) {
                has_s3tc = true;
            } else if (strstr(ext, "_texture_compression_rgtc")) {
                has_rgtc = true;
            } else if (strstr(ext, "_texture_compression_bptc")) {
                has_bptc = true;
            } else if (strstr(ext, "_ES3_compatibility")) {
                has_etc2 = true;
            } else if (strstr(ext, "_texture_filter_anisotropic")) {
                _sg.gl.ext_anisotropic = true;
            } else if (strstr(ext, "_texture_compression_astc_ldr")) {
                has_astc = true;
            }
        }
    }
    // limits
    _sg_gl_init_limits();
    // pixel formats
    const bool has_bgra = false; // not a bug
    const bool has_colorbuffer_float = true;
    const bool has_colorbuffer_half_float = true;
    const bool has_texture_float_linear = true; // FIXME???
    const bool has_float_blend = true;
    _sg_gl_init_pixelformats(has_bgra);
    _sg_gl_init_pixelformats_float(has_colorbuffer_float, has_texture_float_linear, has_float_blend);
    _sg_gl_init_pixelformats_half_float(has_colorbuffer_half_float);
    if (has_s3tc) {
        _sg_gl_init_pixelformats_s3tc();
    }
    if (has_rgtc) {
        _sg_gl_init_pixelformats_rgtc();
    }
    if (has_bptc) {
        _sg_gl_init_pixelformats_bptc();
    }
    if (has_etc2) {
        _sg_gl_init_pixelformats_etc2();
    }
    if (has_astc) {
        _sg_gl_init_pixelformats_astc();
    }
    if (_sg.features.compute) {
        _sg_gl_init_pixelformats_compute();
    }
}
#endif

#if defined(SOKOL_GLES3)
_SOKOL_PRIVATE void _sg_gl_init_caps_gles3(void) {
    _sg.backend = SG_BACKEND_GLES3;
    GLint major_version = 0;
    GLint minor_version = 0;
    glGetIntegerv(GL_MAJOR_VERSION, &major_version);
    glGetIntegerv(GL_MINOR_VERSION, &minor_version);
    const int version = major_version * 100 + minor_version * 10;
    _sg.features.origin_top_left = false;
    _sg.features.image_clamp_to_border = false;
    _sg.features.mrt_independent_blend_state = false;
    _sg.features.mrt_independent_write_mask = false;
    _sg.features.compute = version >= 310;
    _sg.features.msaa_texture_bindings = false;
    _sg.features.gl_texture_views = version >= 430;
    #if defined(__EMSCRIPTEN__)
    _sg.features.separate_buffer_types = true;
    #else
    _sg.features.separate_buffer_types = false;
    #endif
    _sg.features.draw_base_vertex = version >= 320;
    _sg.features.draw_base_instance = false;
    bool has_s3tc = false;  // BC1..BC3
    bool has_rgtc = false;  // BC4 and BC5
    bool has_bptc = false;  // BC6H and BC7
    #if defined(__EMSCRIPTEN__)
    bool has_etc2 = false;
    #else
    bool has_etc2 = true;
    #endif
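    // NOTE: ETC2/EAC compression is a mandatory core feature of GLES 3.0, so it
    // can be assumed to be present on native GLES3 platforms; WebGL2 however
    // only exposes ETC2 via the WEBGL_compressed_texture_etc extension, which
    // is why the flag starts out false under Emscripten and is set in the
    // extension scan below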
    bool has_astc = false;
    bool has_colorbuffer_float = false;
    bool has_colorbuffer_half_float = false;
    bool has_texture_float_linear = false;
    bool has_float_blend = false;
    GLint num_ext = 0;
    glGetIntegerv(GL_NUM_EXTENSIONS, &num_ext);
    for (int i = 0; i < num_ext; i++) {
        const char* ext = (const char*) glGetStringi(GL_EXTENSIONS, (GLuint)i);
        if (ext) {
            if (strstr(ext, "_texture_compression_s3tc")) {
                has_s3tc = true;
            } else if (strstr(ext, "_compressed_texture_s3tc")) {
                has_s3tc = true;
            } else if (strstr(ext, "_texture_compression_rgtc")) {
                has_rgtc = true;
            } else if (strstr(ext, "_texture_compression_bptc")) {
                has_bptc = true;
            } else if (strstr(ext, "_compressed_texture_etc")) {
                has_etc2 = true;
            } else if (strstr(ext, "_compressed_texture_astc")) {
                has_astc = true;
            } else if (strstr(ext, "_color_buffer_float")) {
                has_colorbuffer_float = true;
            } else if (strstr(ext, "_color_buffer_half_float")) {
                has_colorbuffer_half_float = true;
            } else if (strstr(ext, "_texture_float_linear")) {
                has_texture_float_linear = true;
            } else if (strstr(ext, "_float_blend")) {
                has_float_blend = true;
            } else if (strstr(ext, "_texture_filter_anisotropic")) {
                _sg.gl.ext_anisotropic = true;
            }
        }
    }
    /* on WebGL2, color_buffer_float also includes 16-bit formats,
       see: https://developer.mozilla.org/en-US/docs/Web/API/EXT_color_buffer_float
    */
    #if defined(__EMSCRIPTEN__)
    if (!has_colorbuffer_half_float && has_colorbuffer_float) {
        has_colorbuffer_half_float = has_colorbuffer_float;
    }
    #endif
    // limits
    _sg_gl_init_limits();
    // pixel formats
    const bool has_bgra = false; // not a bug
    _sg_gl_init_pixelformats(has_bgra);
    _sg_gl_init_pixelformats_float(has_colorbuffer_float, has_texture_float_linear, has_float_blend);
    _sg_gl_init_pixelformats_half_float(has_colorbuffer_half_float);
    if (has_s3tc) {
        _sg_gl_init_pixelformats_s3tc();
    }
    if (has_rgtc) {
        _sg_gl_init_pixelformats_rgtc();
    }
    if (has_bptc) {
        _sg_gl_init_pixelformats_bptc();
    }
    if (has_etc2) {
        _sg_gl_init_pixelformats_etc2();
    }
    if (has_astc) {
        _sg_gl_init_pixelformats_astc();
    }
    if (_sg.features.compute) {
        _sg_gl_init_pixelformats_compute();
    }
}
#endif

//-- state cache implementation ------------------------------------------------
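/* the state cache shadows the current GL state in _sg.gl.cache so that
   redundant glBind*() calls and render-state changes can be filtered out;
   all GL state mutations in the backend must go through these cache
   functions, otherwise the shadow state would go out of sync with the
   actual GL state
*/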
_SOKOL_PRIVATE void _sg_gl_cache_clear_buffer_bindings(bool force) {
    if (force || (_sg.gl.cache.vertex_buffer != 0)) {
        glBindBuffer(GL_ARRAY_BUFFER, 0);
        _sg.gl.cache.vertex_buffer = 0;
        _sg_stats_inc(gl.num_bind_buffer);
    }
    if (force || (_sg.gl.cache.index_buffer != 0)) {
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
        _sg.gl.cache.index_buffer = 0;
        _sg_stats_inc(gl.num_bind_buffer);
    }
    if (force || (_sg.gl.cache.storage_buffer != 0)) {
        if (_sg.features.compute) {
            glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
        }
        _sg.gl.cache.storage_buffer = 0;
        _sg_stats_inc(gl.num_bind_buffer);
    }
    for (int i = 0; i < _SG_GL_MAX_SBUF_BINDINGS; i++) {
        if (force || (_sg.gl.cache.storage_buffers[i] != 0)) {
            if (_sg.features.compute && (i < _sg.limits.max_storage_buffer_bindings_per_stage)) {
                glBindBufferBase(GL_SHADER_STORAGE_BUFFER, (GLuint)i, 0);
            }
            _sg.gl.cache.storage_buffers[i] = 0;
            _sg_stats_inc(gl.num_bind_buffer);
        }
    }
}

_SOKOL_PRIVATE void _sg_gl_cache_bind_buffer(GLenum target, GLuint buffer) {
    SOKOL_ASSERT((GL_ARRAY_BUFFER == target) || (GL_ELEMENT_ARRAY_BUFFER == target) || (GL_SHADER_STORAGE_BUFFER == target));
    if (target == GL_ARRAY_BUFFER) {
        if (_sg.gl.cache.vertex_buffer != buffer) {
            _sg.gl.cache.vertex_buffer = buffer;
            glBindBuffer(target, buffer);
            _sg_stats_inc(gl.num_bind_buffer);
        }
    } else if (target == GL_ELEMENT_ARRAY_BUFFER) {
        if (_sg.gl.cache.index_buffer != buffer) {
            _sg.gl.cache.index_buffer = buffer;
            glBindBuffer(target, buffer);
            _sg_stats_inc(gl.num_bind_buffer);
        }
    } else if (target == GL_SHADER_STORAGE_BUFFER) {
        if (_sg.gl.cache.storage_buffer != buffer) {
            _sg.gl.cache.storage_buffer = buffer;
            if (_sg.features.compute) {
                glBindBuffer(target, buffer);
            }
            _sg_stats_inc(gl.num_bind_buffer);
        }
    } else {
        SOKOL_UNREACHABLE;
    }
}
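
// NOTE: glBindBufferBase()/glBindBufferRange() bind both to the indexed
// binding point *and* to the generic GL_SHADER_STORAGE_BUFFER binding point,
// which is why the function below also mirrors the buffer into the generic
// _sg.gl.cache.storage_buffer slot (the '// not a bug' comment)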
_SOKOL_PRIVATE void _sg_gl_cache_bind_storage_buffer(uint8_t glsl_binding_n, GLuint buffer, int offset, int buf_size) {
    SOKOL_ASSERT(glsl_binding_n < _SG_GL_MAX_SBUF_BINDINGS);
    SOKOL_ASSERT(offset < buf_size);
    const bool buf_neql = _sg.gl.cache.storage_buffers[glsl_binding_n] != buffer;
    const bool off_neql = _sg.gl.cache.storage_buffer_offsets[glsl_binding_n] != offset;
    if (buf_neql || off_neql) {
        _sg.gl.cache.storage_buffers[glsl_binding_n] = buffer;
        _sg.gl.cache.storage_buffer_offsets[glsl_binding_n] = offset;
        _sg.gl.cache.storage_buffer = buffer; // not a bug
        if (_sg.features.compute) {
            SOKOL_ASSERT(glsl_binding_n < _sg.limits.max_storage_buffer_bindings_per_stage);
            glBindBufferRange(GL_SHADER_STORAGE_BUFFER, glsl_binding_n, buffer, offset, buf_size - offset);
        }
        _sg_stats_inc(gl.num_bind_buffer);
    }
}
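
/* the store/restore functions below implement a one-deep save/restore
   mechanism so that resource creation and update code can temporarily bind
   a buffer without clobbering the cached binding, the usual pattern looks
   like this (see _sg_gl_create_buffer() further below):

       _sg_gl_cache_store_buffer_binding(gl_target);
       _sg_gl_cache_bind_buffer(gl_target, gl_buf);
       glBufferData(gl_target, ...);
       _sg_gl_cache_restore_buffer_binding(gl_target);
*/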
_SOKOL_PRIVATE void _sg_gl_cache_store_buffer_binding(GLenum target) {
    if (target == GL_ARRAY_BUFFER) {
        _sg.gl.cache.stored_vertex_buffer = _sg.gl.cache.vertex_buffer;
    } else if (target == GL_ELEMENT_ARRAY_BUFFER) {
        _sg.gl.cache.stored_index_buffer = _sg.gl.cache.index_buffer;
    } else if (target == GL_SHADER_STORAGE_BUFFER) {
        _sg.gl.cache.stored_storage_buffer = _sg.gl.cache.storage_buffer;
    } else {
        SOKOL_UNREACHABLE;
    }
}

_SOKOL_PRIVATE void _sg_gl_cache_restore_buffer_binding(GLenum target) {
    if (target == GL_ARRAY_BUFFER) {
        if (_sg.gl.cache.stored_vertex_buffer != 0) {
            // we only care about restoring valid ids
            _sg_gl_cache_bind_buffer(target, _sg.gl.cache.stored_vertex_buffer);
            _sg.gl.cache.stored_vertex_buffer = 0;
        }
    } else if (target == GL_ELEMENT_ARRAY_BUFFER) {
        if (_sg.gl.cache.stored_index_buffer != 0) {
            // we only care about restoring valid ids
            _sg_gl_cache_bind_buffer(target, _sg.gl.cache.stored_index_buffer);
            _sg.gl.cache.stored_index_buffer = 0;
        }
    } else if (target == GL_SHADER_STORAGE_BUFFER) {
        if (_sg.gl.cache.stored_storage_buffer != 0) {
            // we only care about restoring valid ids
            _sg_gl_cache_bind_buffer(target, _sg.gl.cache.stored_storage_buffer);
            _sg.gl.cache.stored_storage_buffer = 0;
        }
    } else {
        SOKOL_UNREACHABLE;
    }
}

// called from _sg_gl_discard_buffer()
_SOKOL_PRIVATE void _sg_gl_cache_invalidate_buffer(GLuint buf) {
    if (buf == _sg.gl.cache.vertex_buffer) {
        _sg.gl.cache.vertex_buffer = 0;
        glBindBuffer(GL_ARRAY_BUFFER, 0);
        _sg_stats_inc(gl.num_bind_buffer);
    }
    if (buf == _sg.gl.cache.index_buffer) {
        _sg.gl.cache.index_buffer = 0;
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
        _sg_stats_inc(gl.num_bind_buffer);
    }
    if (buf == _sg.gl.cache.storage_buffer) {
        _sg.gl.cache.storage_buffer = 0;
        glBindBuffer(GL_SHADER_STORAGE_BUFFER, 0);
        _sg_stats_inc(gl.num_bind_buffer);
    }
    for (int i = 0; i < _SG_GL_MAX_SBUF_BINDINGS; i++) {
        if (buf == _sg.gl.cache.storage_buffers[i]) {
            _sg.gl.cache.storage_buffers[i] = 0;
            _sg.gl.cache.storage_buffer = 0; // not a bug!
            if (_sg.features.compute && (i < _sg.limits.max_storage_buffer_bindings_per_stage)) {
                glBindBufferBase(GL_SHADER_STORAGE_BUFFER, (GLuint)i, 0);
            }
            _sg_stats_inc(gl.num_bind_buffer);
        }
    }
    if (buf == _sg.gl.cache.stored_vertex_buffer) {
        _sg.gl.cache.stored_vertex_buffer = 0;
    }
    if (buf == _sg.gl.cache.stored_index_buffer) {
        _sg.gl.cache.stored_index_buffer = 0;
    }
    if (buf == _sg.gl.cache.stored_storage_buffer) {
        _sg.gl.cache.stored_storage_buffer = 0;
    }
    for (int i = 0; i < SG_MAX_VERTEX_ATTRIBUTES; i++) {
        if (buf == _sg.gl.cache.attrs[i].gl_vbuf) {
            _sg.gl.cache.attrs[i].gl_vbuf = 0;
        }
    }
}
_SOKOL_PRIVATE void _sg_gl_cache_active_texture(GLenum texture) {
    _SG_GL_CHECK_ERROR();
    if (_sg.gl.cache.cur_active_texture != texture) {
        _sg.gl.cache.cur_active_texture = texture;
        glActiveTexture(texture);
        _sg_stats_inc(gl.num_active_texture);
    }
    _SG_GL_CHECK_ERROR();
}

_SOKOL_PRIVATE void _sg_gl_cache_clear_texture_sampler_bindings(bool force) {
    _SG_GL_CHECK_ERROR();
    for (int i = 0; (i < _SG_GL_MAX_TEX_SMP_BINDINGS) && (i < _sg.limits.gl_max_combined_texture_image_units); i++) {
        if (force || (_sg.gl.cache.texture_samplers[i].texture != 0)) {
            GLenum gl_texture_unit = (GLenum) (GL_TEXTURE0 + i);
            glActiveTexture(gl_texture_unit);
            _sg_stats_inc(gl.num_active_texture);
            glBindTexture(GL_TEXTURE_2D, 0);
            glBindTexture(GL_TEXTURE_CUBE_MAP, 0);
            glBindTexture(GL_TEXTURE_3D, 0);
            glBindTexture(GL_TEXTURE_2D_ARRAY, 0);
            _sg_stats_add(gl.num_bind_texture, 4);
            glBindSampler((GLuint)i, 0);
            _sg_stats_inc(gl.num_bind_sampler);
            _sg.gl.cache.texture_samplers[i].target = 0;
            _sg.gl.cache.texture_samplers[i].texture = 0;
            _sg.gl.cache.texture_samplers[i].sampler = 0;
            _sg.gl.cache.cur_active_texture = gl_texture_unit;
        }
    }
    _SG_GL_CHECK_ERROR();
}

_SOKOL_PRIVATE void _sg_gl_cache_bind_texture_sampler(int8_t gl_tex_slot, GLenum target, GLuint texture, GLuint sampler) {
    /* it's valid to call this function with target=0 and/or texture=0:
       target=0 will unbind the previous binding, texture=0 will clear
       the new binding
    */
    SOKOL_ASSERT((gl_tex_slot >= 0) && (gl_tex_slot < _SG_GL_MAX_TEX_SMP_BINDINGS));
    if (gl_tex_slot >= _sg.limits.gl_max_combined_texture_image_units) {
        return;
    }
    _SG_GL_CHECK_ERROR();
    _sg_gl_cache_texture_sampler_bind_slot* slot = &_sg.gl.cache.texture_samplers[gl_tex_slot];
    if ((slot->target != target) || (slot->texture != texture) || (slot->sampler != sampler)) {
        _sg_gl_cache_active_texture((GLenum)(GL_TEXTURE0 + gl_tex_slot));
        // if the target has changed, clear the previous binding on that target
        if ((target != slot->target) && (slot->target != 0)) {
            glBindTexture(slot->target, 0);
            _SG_GL_CHECK_ERROR();
            _sg_stats_inc(gl.num_bind_texture);
        }
        // apply new binding (can be 0 to unbind)
        if (target != 0) {
            glBindTexture(target, texture);
            _SG_GL_CHECK_ERROR();
            _sg_stats_inc(gl.num_bind_texture);
        }
        // apply new sampler (can be 0 to unbind)
        glBindSampler((GLuint)gl_tex_slot, sampler);
        _SG_GL_CHECK_ERROR();
        _sg_stats_inc(gl.num_bind_sampler);
        slot->target = target;
        slot->texture = texture;
        slot->sampler = sampler;
    }
}

_SOKOL_PRIVATE void _sg_gl_cache_store_texture_sampler_binding(int8_t gl_tex_slot) {
    SOKOL_ASSERT((gl_tex_slot >= 0) && (gl_tex_slot < _SG_GL_MAX_TEX_SMP_BINDINGS));
    _sg.gl.cache.stored_texture_sampler = _sg.gl.cache.texture_samplers[gl_tex_slot];
}

_SOKOL_PRIVATE void _sg_gl_cache_restore_texture_sampler_binding(int8_t gl_tex_slot) {
    SOKOL_ASSERT((gl_tex_slot >= 0) && (gl_tex_slot < _SG_GL_MAX_TEX_SMP_BINDINGS));
    _sg_gl_cache_texture_sampler_bind_slot* slot = &_sg.gl.cache.stored_texture_sampler;
    if (slot->texture != 0) {
        // we only care about restoring valid ids
        SOKOL_ASSERT(slot->target != 0);
        _sg_gl_cache_bind_texture_sampler(gl_tex_slot, slot->target, slot->texture, slot->sampler);
        slot->target = 0;
        slot->texture = 0;
        slot->sampler = 0;
    }
}
// called from _sg_gl_discard_texture() and _sg_gl_discard_sampler()
_SOKOL_PRIVATE void _sg_gl_cache_invalidate_texture_sampler(GLuint tex, GLuint smp) {
    _SG_GL_CHECK_ERROR();
    for (size_t i = 0; i < _SG_GL_MAX_TEX_SMP_BINDINGS; i++) {
        _sg_gl_cache_texture_sampler_bind_slot* slot = &_sg.gl.cache.texture_samplers[i];
        if ((0 != slot->target) && ((tex == slot->texture) || (smp == slot->sampler))) {
            _sg_gl_cache_active_texture((GLenum)(GL_TEXTURE0 + i));
            glBindTexture(slot->target, 0);
            _SG_GL_CHECK_ERROR();
            _sg_stats_inc(gl.num_bind_texture);
            glBindSampler((GLuint)i, 0);
            _SG_GL_CHECK_ERROR();
            _sg_stats_inc(gl.num_bind_sampler);
            slot->target = 0;
            slot->texture = 0;
            slot->sampler = 0;
        }
    }
    if ((tex == _sg.gl.cache.stored_texture_sampler.texture) || (smp == _sg.gl.cache.stored_texture_sampler.sampler)) {
        _sg.gl.cache.stored_texture_sampler.target = 0;
        _sg.gl.cache.stored_texture_sampler.texture = 0;
        _sg.gl.cache.stored_texture_sampler.sampler = 0;
    }
}

// called from _sg_gl_discard_shader()
_SOKOL_PRIVATE void _sg_gl_cache_invalidate_program(GLuint prog) {
    if (prog == _sg.gl.cache.prog) {
        _sg.gl.cache.prog = 0;
        glUseProgram(0);
        _sg_stats_inc(gl.num_use_program);
    }
}

// called from _sg_gl_discard_pipeline()
_SOKOL_PRIVATE void _sg_gl_cache_invalidate_pipeline(_sg_pipeline_t* pip) {
    if (_sg_sref_slot_eql(&_sg.gl.cache.cur_pip, &pip->slot)) {
        _sg.gl.cache.cur_pip = _sg_sref(0);
    }
}
_SOKOL_PRIVATE void _sg_gl_reset_state_cache(void) {
    _SG_GL_CHECK_ERROR();
    glBindVertexArray(_sg.gl.vao);
    _SG_GL_CHECK_ERROR();
    _sg_clear(&_sg.gl.cache, sizeof(_sg.gl.cache));
    _sg_gl_cache_clear_buffer_bindings(true);
    _SG_GL_CHECK_ERROR();
    _sg_gl_cache_clear_texture_sampler_bindings(true);
    _SG_GL_CHECK_ERROR();
    for (int i = 0; i < _sg.limits.max_vertex_attrs; i++) {
        _sg_gl_attr_t* attr = &_sg.gl.cache.attrs[i].gl_attr;
        attr->vb_index = -1;
        attr->divisor = -1;
        glDisableVertexAttribArray((GLuint)i);
        _SG_GL_CHECK_ERROR();
        _sg_stats_inc(gl.num_disable_vertex_attrib_array);
    }
    _sg.gl.cache.cur_primitive_type = GL_TRIANGLES;
    // shader program
    glGetIntegerv(GL_CURRENT_PROGRAM, (GLint*)&_sg.gl.cache.prog);
    _SG_GL_CHECK_ERROR();
    // depth and stencil state
    _sg.gl.cache.depth.compare = SG_COMPAREFUNC_ALWAYS;
    _sg.gl.cache.stencil.front.compare = SG_COMPAREFUNC_ALWAYS;
    _sg.gl.cache.stencil.front.fail_op = SG_STENCILOP_KEEP;
    _sg.gl.cache.stencil.front.depth_fail_op = SG_STENCILOP_KEEP;
    _sg.gl.cache.stencil.front.pass_op = SG_STENCILOP_KEEP;
    _sg.gl.cache.stencil.back.compare = SG_COMPAREFUNC_ALWAYS;
    _sg.gl.cache.stencil.back.fail_op = SG_STENCILOP_KEEP;
    _sg.gl.cache.stencil.back.depth_fail_op = SG_STENCILOP_KEEP;
    _sg.gl.cache.stencil.back.pass_op = SG_STENCILOP_KEEP;
    glEnable(GL_DEPTH_TEST);
    glDepthFunc(GL_ALWAYS);
    glDepthMask(GL_FALSE);
    glDisable(GL_STENCIL_TEST);
    glStencilFunc(GL_ALWAYS, 0, 0);
    glStencilOp(GL_KEEP, GL_KEEP, GL_KEEP);
    glStencilMask(0);
    _sg_stats_add(gl.num_render_state, 7);
    // blend state
    _sg.gl.cache.blend.src_factor_rgb = SG_BLENDFACTOR_ONE;
    _sg.gl.cache.blend.dst_factor_rgb = SG_BLENDFACTOR_ZERO;
    _sg.gl.cache.blend.op_rgb = SG_BLENDOP_ADD;
    _sg.gl.cache.blend.src_factor_alpha = SG_BLENDFACTOR_ONE;
    _sg.gl.cache.blend.dst_factor_alpha = SG_BLENDFACTOR_ZERO;
    _sg.gl.cache.blend.op_alpha = SG_BLENDOP_ADD;
    glDisable(GL_BLEND);
    glBlendFuncSeparate(GL_ONE, GL_ZERO, GL_ONE, GL_ZERO);
    glBlendEquationSeparate(GL_FUNC_ADD, GL_FUNC_ADD);
    glBlendColor(0.0f, 0.0f, 0.0f, 0.0f);
    _sg_stats_add(gl.num_render_state, 4);
    // standalone state
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        _sg.gl.cache.color_write_mask[i] = SG_COLORMASK_RGBA;
    }
    _sg.gl.cache.cull_mode = SG_CULLMODE_NONE;
    _sg.gl.cache.face_winding = SG_FACEWINDING_CW;
    _sg.gl.cache.sample_count = 1;
    glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
    glPolygonOffset(0.0f, 0.0f);
    glDisable(GL_POLYGON_OFFSET_FILL);
    glDisable(GL_CULL_FACE);
    glFrontFace(GL_CW);
    glCullFace(GL_BACK);
    glEnable(GL_SCISSOR_TEST);
    glDisable(GL_SAMPLE_ALPHA_TO_COVERAGE);
    glEnable(GL_DITHER);
    glDisable(GL_POLYGON_OFFSET_FILL);
    _sg_stats_add(gl.num_render_state, 10);
    #if defined(SOKOL_GLCORE)
    glEnable(GL_MULTISAMPLE);
    glEnable(GL_PROGRAM_POINT_SIZE);
    _sg_stats_add(gl.num_render_state, 2);
    #endif
}
_SOKOL_PRIVATE void _sg_gl_setup_backend(const sg_desc* desc) {
    _SOKOL_UNUSED(desc);
    // assumes that _sg.gl is already zero-initialized
    _sg.gl.valid = true;
    #if defined(_SOKOL_USE_WIN32_GL_LOADER)
    _sg_gl_load_opengl();
    #endif
    // clear initial GL error state
    #if defined(SOKOL_DEBUG)
    while (glGetError() != GL_NO_ERROR);
    #endif
    #if defined(SOKOL_GLCORE)
    _sg_gl_init_caps_glcore();
    #elif defined(SOKOL_GLES3)
    _sg_gl_init_caps_gles3();
    #endif
    // create and bind global vertex array object which will be mutated as needed
    glGenVertexArrays(1, &_sg.gl.vao);
    glBindVertexArray(_sg.gl.vao);
    _SG_GL_CHECK_ERROR();
    // create global framebuffer object which will be mutated as needed
    glGenFramebuffers(1, &_sg.gl.fb);
    _SG_GL_CHECK_ERROR();
    // incoming texture data is generally expected to be packed tightly
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    #if defined(SOKOL_GLCORE)
    // enable seamless cubemap sampling (only desktop GL)
    glEnable(GL_TEXTURE_CUBE_MAP_SEAMLESS);
    #endif
    _sg_gl_reset_state_cache();
}

_SOKOL_PRIVATE void _sg_gl_discard_backend(void) {
    SOKOL_ASSERT(_sg.gl.valid);
    if (_sg.gl.fb) {
        glDeleteFramebuffers(1, &_sg.gl.fb);
    }
    if (_sg.gl.vao) {
        glDeleteVertexArrays(1, &_sg.gl.vao);
    }
    #if defined(_SOKOL_USE_WIN32_GL_LOADER)
    _sg_gl_unload_opengl();
    #endif
    _sg.gl.valid = false;
}

//-- GL backend resource creation and destruction ------------------------------
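// NOTE: dynamically updated resources are multi-buffered: cmn.num_slots GL
// objects are created (one per frame-in-flight) and cycled on update so that
// the CPU never overwrites data the GPU may still be reading from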
_SOKOL_PRIVATE sg_resource_state _sg_gl_create_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(buf && desc);
    _SG_GL_CHECK_ERROR();
    buf->gl.injected = (0 != desc->gl_buffers[0]);
    const GLenum gl_target = _sg_gl_buffer_target(&buf->cmn.usage);
    const GLenum gl_usage = _sg_gl_buffer_usage(&buf->cmn.usage);
    for (int slot = 0; slot < buf->cmn.num_slots; slot++) {
        GLuint gl_buf = 0;
        if (buf->gl.injected) {
            SOKOL_ASSERT(desc->gl_buffers[slot]);
            gl_buf = desc->gl_buffers[slot];
        } else {
            glGenBuffers(1, &gl_buf);
            SOKOL_ASSERT(gl_buf);
            _sg_gl_cache_store_buffer_binding(gl_target);
            _sg_gl_cache_bind_buffer(gl_target, gl_buf);
            glBufferData(gl_target, buf->cmn.size, 0, gl_usage);
            if (desc->data.ptr) {
                glBufferSubData(gl_target, 0, buf->cmn.size, desc->data.ptr);
            }
            _sg_gl_cache_restore_buffer_binding(gl_target);
        }
        buf->gl.buf[slot] = gl_buf;
    }
    _SG_GL_CHECK_ERROR();
    return SG_RESOURCESTATE_VALID;
}

_SOKOL_PRIVATE void _sg_gl_discard_buffer(_sg_buffer_t* buf) {
    SOKOL_ASSERT(buf);
    _SG_GL_CHECK_ERROR();
    for (int slot = 0; slot < buf->cmn.num_slots; slot++) {
        if (buf->gl.buf[slot]) {
            _sg_gl_cache_invalidate_buffer(buf->gl.buf[slot]);
            if (!buf->gl.injected) {
                glDeleteBuffers(1, &buf->gl.buf[slot]);
            }
        }
    }
    _SG_GL_CHECK_ERROR();
}

_SOKOL_PRIVATE bool _sg_gl_supported_texture_format(sg_pixel_format fmt) {
    const int fmt_index = (int) fmt;
    SOKOL_ASSERT((fmt_index > SG_PIXELFORMAT_NONE) && (fmt_index < _SG_PIXELFORMAT_NUM));
    return _sg.formats[fmt_index].sample;
}
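
/* texture allocation strategy: when immutable texture storage is available
   (_SOKOL_GL_HAS_TEXSTORAGE), _sg_gl_texstorage() allocates all mip levels
   upfront via glTexStorage*(), and _sg_gl_teximage() merely uploads data
   via glTexSubImage*(); on the legacy path, _sg_gl_teximage() allocates
   and uploads each mip level in a single glTexImage*() call instead
*/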
_SOKOL_PRIVATE void _sg_gl_texstorage(const _sg_image_t* img) {
    const GLenum tgt = img->gl.target;
    const int num_mips = img->cmn.num_mipmaps;
    #if defined(_SOKOL_GL_HAS_TEXSTORAGE)
    const GLenum ifmt = _sg_gl_teximage_internal_format(img->cmn.pixel_format);
    const bool msaa = img->cmn.sample_count > 1;
    const int w = img->cmn.width;
    const int h = img->cmn.height;
    if ((SG_IMAGETYPE_2D == img->cmn.type) || (SG_IMAGETYPE_CUBE == img->cmn.type)) {
        #if defined(SOKOL_GLCORE)
        if (msaa) {
            glTexStorage2DMultisample(tgt, img->cmn.sample_count, ifmt, w, h, GL_TRUE);
        } else {
            glTexStorage2D(tgt, num_mips, ifmt, w, h);
        }
        #else
        SOKOL_ASSERT(!msaa); _SOKOL_UNUSED(msaa);
        glTexStorage2D(tgt, num_mips, ifmt, w, h);
        #endif
    } else if ((SG_IMAGETYPE_3D == img->cmn.type) || (SG_IMAGETYPE_ARRAY == img->cmn.type)) {
        const int depth = img->cmn.num_slices;
        #if defined(SOKOL_GLCORE)
        if (msaa) {
            // NOTE: MSAA works only for array textures, not 3D textures
            glTexStorage3DMultisample(tgt, img->cmn.sample_count, ifmt, w, h, depth, GL_TRUE);
        } else {
            glTexStorage3D(tgt, num_mips, ifmt, w, h, depth);
        }
        #else
        SOKOL_ASSERT(!msaa); _SOKOL_UNUSED(msaa);
        glTexStorage3D(tgt, num_mips, ifmt, w, h, depth);
        #endif
    }
    #else
    glTexParameteri(tgt, GL_TEXTURE_MAX_LEVEL, num_mips - 1);
    #endif
    _SG_GL_CHECK_ERROR();
}

_SOKOL_PRIVATE void _sg_gl_texsubimage(const _sg_image_t* img, GLenum tgt, int mip_index, int w, int h, int depth, const GLvoid* data_ptr, GLsizei data_size) {
    SOKOL_ASSERT(data_ptr && (data_size > 0));
    SOKOL_ASSERT(img->cmn.sample_count == 1);
    const bool compressed = _sg_is_compressed_pixel_format(img->cmn.pixel_format);
    if ((SG_IMAGETYPE_2D == img->cmn.type) || (SG_IMAGETYPE_CUBE == img->cmn.type)) {
        if (compressed) {
            const GLenum ifmt = _sg_gl_teximage_internal_format(img->cmn.pixel_format);
            glCompressedTexSubImage2D(tgt, mip_index, 0, 0, w, h, ifmt, data_size, data_ptr);
        } else {
            const GLenum type = _sg_gl_teximage_type(img->cmn.pixel_format);
            const GLenum fmt = _sg_gl_teximage_format(img->cmn.pixel_format);
            glTexSubImage2D(tgt, mip_index, 0, 0, w, h, fmt, type, data_ptr);
        }
    } else if ((SG_IMAGETYPE_3D == img->cmn.type) || (SG_IMAGETYPE_ARRAY == img->cmn.type)) {
        if (compressed) {
            const GLenum ifmt = _sg_gl_teximage_internal_format(img->cmn.pixel_format);
            glCompressedTexSubImage3D(tgt, mip_index, 0, 0, 0, w, h, depth, ifmt, data_size, data_ptr);
        } else {
            const GLenum type = _sg_gl_teximage_type(img->cmn.pixel_format);
            const GLenum fmt = _sg_gl_teximage_format(img->cmn.pixel_format);
            glTexSubImage3D(tgt, mip_index, 0, 0, 0, w, h, depth, fmt, type, data_ptr);
        }
    }
}

_SOKOL_PRIVATE void _sg_gl_teximage(const _sg_image_t* img, GLenum tgt, int mip_index, int w, int h, int depth, const GLvoid* data_ptr, GLsizei data_size) {
    #if defined(_SOKOL_GL_HAS_TEXSTORAGE)
    if (data_ptr == 0) {
        return;
    }
    _sg_gl_texsubimage(img, tgt, mip_index, w, h, depth, data_ptr, data_size);
    #else
    const bool compressed = _sg_is_compressed_pixel_format(img->cmn.pixel_format);
    const GLenum ifmt = _sg_gl_teximage_internal_format(img->cmn.pixel_format);
    const bool msaa = img->cmn.sample_count > 1;
    if ((SG_IMAGETYPE_2D == img->cmn.type) || (SG_IMAGETYPE_CUBE == img->cmn.type)) {
        if (compressed) {
            SOKOL_ASSERT(!msaa); _SOKOL_UNUSED(msaa);
            glCompressedTexImage2D(tgt, mip_index, ifmt, w, h, 0, data_size, data_ptr);
        } else {
            const GLenum type = _sg_gl_teximage_type(img->cmn.pixel_format);
            const GLenum fmt = _sg_gl_teximage_format(img->cmn.pixel_format);
            #if defined(SOKOL_GLCORE) && !defined(__APPLE__)
            if (msaa) {
                glTexImage2DMultisample(tgt, img->cmn.sample_count, ifmt, w, h, GL_TRUE);
            } else {
                glTexImage2D(tgt, mip_index, (GLint)ifmt, w, h, 0, fmt, type, data_ptr);
            }
            #else
            SOKOL_ASSERT(!msaa); _SOKOL_UNUSED(msaa);
            glTexImage2D(tgt, mip_index, (GLint)ifmt, w, h, 0, fmt, type, data_ptr);
            #endif
        }
    } else if ((SG_IMAGETYPE_3D == img->cmn.type) || (SG_IMAGETYPE_ARRAY == img->cmn.type)) {
        if (compressed) {
            SOKOL_ASSERT(!msaa); _SOKOL_UNUSED(msaa);
            glCompressedTexImage3D(tgt, mip_index, ifmt, w, h, depth, 0, data_size, data_ptr);
        } else {
            const GLenum type = _sg_gl_teximage_type(img->cmn.pixel_format);
            const GLenum fmt = _sg_gl_teximage_format(img->cmn.pixel_format);
            #if defined(SOKOL_GLCORE) && !defined(__APPLE__)
            if (msaa) {
                // NOTE: MSAA works only for array textures, not 3D textures
                glTexImage3DMultisample(tgt, img->cmn.sample_count, ifmt, w, h, depth, GL_TRUE);
            } else {
                glTexImage3D(tgt, mip_index, (GLint)ifmt, w, h, depth, 0, fmt, type, data_ptr);
            }
            #else
            SOKOL_ASSERT(!msaa); _SOKOL_UNUSED(msaa);
            glTexImage3D(tgt, mip_index, (GLint)ifmt, w, h, depth, 0, fmt, type, data_ptr);
            #endif
        }
    }
    #endif
    _SG_GL_CHECK_ERROR();
}
_SOKOL_PRIVATE sg_resource_state _sg_gl_create_image(_sg_image_t* img, const sg_image_desc* desc) {
    SOKOL_ASSERT(img && desc);
    _SG_GL_CHECK_ERROR();
    img->gl.injected = (0 != desc->gl_textures[0]);
    // check if the texture format is supported
    if (!_sg_gl_supported_texture_format(img->cmn.pixel_format)) {
        _SG_ERROR(GL_TEXTURE_FORMAT_NOT_SUPPORTED);
        return SG_RESOURCESTATE_FAILED;
    }
    if (img->gl.injected) {
        img->gl.target = _sg_gl_texture_target(img->cmn.type, img->cmn.sample_count);
        // inject externally created GL textures
        for (int slot = 0; slot < img->cmn.num_slots; slot++) {
            SOKOL_ASSERT(desc->gl_textures[slot]);
            img->gl.tex[slot] = desc->gl_textures[slot];
        }
        if (desc->gl_texture_target) {
            img->gl.target = (GLenum)desc->gl_texture_target;
        }
    } else {
        // on platforms that don't support MSAA texture bindings, no actual GL
        // texture object is created, instead only an attachment view object can be built
        const bool msaa = img->cmn.sample_count > 1;
        if (msaa && !_sg.features.msaa_texture_bindings) {
            if (img->cmn.usage.color_attachment || img->cmn.usage.depth_stencil_attachment) {
                return SG_RESOURCESTATE_VALID;
            } else {
                return SG_RESOURCESTATE_FAILED;
            }
        }
        img->gl.target = _sg_gl_texture_target(img->cmn.type, img->cmn.sample_count);
        for (int slot = 0; slot < img->cmn.num_slots; slot++) {
            glGenTextures(1, &img->gl.tex[slot]);
            SOKOL_ASSERT(img->gl.tex[slot]);
            _sg_gl_cache_store_texture_sampler_binding(0);
            _sg_gl_cache_bind_texture_sampler(0, img->gl.target, img->gl.tex[slot], 0);
            _sg_gl_texstorage(img);
            for (int mip_index = 0; mip_index < img->cmn.num_mipmaps; mip_index++) {
                const GLvoid* data_ptr = desc->data.mip_levels[mip_index].ptr;
                const GLsizei data_size = (GLsizei)desc->data.mip_levels[mip_index].size;
                const int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
                const int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
                const int mip_depth = (SG_IMAGETYPE_3D == img->cmn.type) ? _sg_miplevel_dim(img->cmn.num_slices, mip_index) : img->cmn.num_slices;
                if (SG_IMAGETYPE_CUBE == img->cmn.type) {
                    const int surf_pitch = _sg_surface_pitch(img->cmn.pixel_format, mip_width, mip_height, 1);
                    // NOTE: surf_ptr is allowed to be null here
                    const uint8_t* surf_ptr = (const uint8_t*) data_ptr;
                    for (int i = 0; i < 6; i++) {
                        const GLenum gl_img_target = _sg_gl_cubeface_target(i);
                        _sg_gl_teximage(img, gl_img_target, mip_index, mip_width, mip_height, mip_depth, surf_ptr, surf_pitch);
                        if (data_ptr) {
                            SOKOL_ASSERT((6 * surf_pitch) <= data_size);
                            surf_ptr += surf_pitch;
                        }
                    }
                } else {
                    _sg_gl_teximage(img, img->gl.target, mip_index, mip_width, mip_height, mip_depth, data_ptr, data_size);
                }
            }
            _sg_gl_cache_restore_texture_sampler_binding(0);
        }
    }
    _SG_GL_CHECK_ERROR();
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_gl_discard_image(_sg_image_t* img) {
    SOKOL_ASSERT(img);
    _SG_GL_CHECK_ERROR();
    for (int slot = 0; slot < img->cmn.num_slots; slot++) {
        if (img->gl.tex[slot]) {
            _sg_gl_cache_invalidate_texture_sampler(img->gl.tex[slot], 0);
            if (!img->gl.injected) {
                glDeleteTextures(1, &img->gl.tex[slot]);
            }
        }
    }
    _SG_GL_CHECK_ERROR();
}

_SOKOL_PRIVATE sg_resource_state _sg_gl_create_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
    SOKOL_ASSERT(smp && desc);
    _SG_GL_CHECK_ERROR();
    smp->gl.injected = (0 != desc->gl_sampler);
    if (smp->gl.injected) {
        smp->gl.smp = (GLuint) desc->gl_sampler;
    } else {
        glGenSamplers(1, &smp->gl.smp);
        SOKOL_ASSERT(smp->gl.smp);
        const GLenum gl_min_filter = _sg_gl_min_filter(smp->cmn.min_filter, smp->cmn.mipmap_filter);
        const GLenum gl_mag_filter = _sg_gl_mag_filter(smp->cmn.mag_filter);
        glSamplerParameteri(smp->gl.smp, GL_TEXTURE_MIN_FILTER, (GLint)gl_min_filter);
        glSamplerParameteri(smp->gl.smp, GL_TEXTURE_MAG_FILTER, (GLint)gl_mag_filter);
        // GL spec has strange defaults for mipmap min/max lod: -1000 to +1000
        const float min_lod = _sg_clamp(desc->min_lod, 0.0f, 1000.0f);
        const float max_lod = _sg_clamp(desc->max_lod, 0.0f, 1000.0f);
        glSamplerParameterf(smp->gl.smp, GL_TEXTURE_MIN_LOD, min_lod);
        glSamplerParameterf(smp->gl.smp, GL_TEXTURE_MAX_LOD, max_lod);
        glSamplerParameteri(smp->gl.smp, GL_TEXTURE_WRAP_S, (GLint)_sg_gl_wrap(smp->cmn.wrap_u));
        glSamplerParameteri(smp->gl.smp, GL_TEXTURE_WRAP_T, (GLint)_sg_gl_wrap(smp->cmn.wrap_v));
        glSamplerParameteri(smp->gl.smp, GL_TEXTURE_WRAP_R, (GLint)_sg_gl_wrap(smp->cmn.wrap_w));
        #if defined(SOKOL_GLCORE)
        float border[4];
        switch (smp->cmn.border_color) {
            case SG_BORDERCOLOR_TRANSPARENT_BLACK:
                border[0] = 0.0f; border[1] = 0.0f; border[2] = 0.0f; border[3] = 0.0f;
                break;
            case SG_BORDERCOLOR_OPAQUE_WHITE:
                border[0] = 1.0f; border[1] = 1.0f; border[2] = 1.0f; border[3] = 1.0f;
                break;
            default:
                border[0] = 0.0f; border[1] = 0.0f; border[2] = 0.0f; border[3] = 1.0f;
                break;
        }
        glSamplerParameterfv(smp->gl.smp, GL_TEXTURE_BORDER_COLOR, border);
        #endif
        if (smp->cmn.compare != SG_COMPAREFUNC_NEVER) {
            glSamplerParameteri(smp->gl.smp, GL_TEXTURE_COMPARE_MODE, GL_COMPARE_REF_TO_TEXTURE);
            glSamplerParameteri(smp->gl.smp, GL_TEXTURE_COMPARE_FUNC, (GLint)_sg_gl_compare_func(smp->cmn.compare));
        } else {
            glSamplerParameteri(smp->gl.smp, GL_TEXTURE_COMPARE_MODE, GL_NONE);
        }
        if (_sg.gl.ext_anisotropic && (smp->cmn.max_anisotropy > 1)) {
            GLint max_aniso = (GLint) smp->cmn.max_anisotropy;
            if (max_aniso > _sg.gl.max_anisotropy) {
                max_aniso = _sg.gl.max_anisotropy;
            }
            glSamplerParameteri(smp->gl.smp, GL_TEXTURE_MAX_ANISOTROPY_EXT, max_aniso);
        }
    }
    _SG_GL_CHECK_ERROR();
    return SG_RESOURCESTATE_VALID;
}

_SOKOL_PRIVATE void _sg_gl_discard_sampler(_sg_sampler_t* smp) {
    SOKOL_ASSERT(smp);
    _SG_GL_CHECK_ERROR();
    _sg_gl_cache_invalidate_texture_sampler(0, smp->gl.smp);
    if (!smp->gl.injected) {
        glDeleteSamplers(1, &smp->gl.smp);
    }
    _SG_GL_CHECK_ERROR();
}
_SOKOL_PRIVATE GLuint _sg_gl_compile_shader(sg_shader_stage stage, const char* src) {
    SOKOL_ASSERT(src);
    _SG_GL_CHECK_ERROR();
    GLuint gl_shd = glCreateShader(_sg_gl_shader_stage(stage));
    glShaderSource(gl_shd, 1, &src, 0);
    glCompileShader(gl_shd);
    GLint compile_status = 0;
    glGetShaderiv(gl_shd, GL_COMPILE_STATUS, &compile_status);
    if (!compile_status) {
        // compilation failed, log error and delete shader
        GLint log_len = 0;
        glGetShaderiv(gl_shd, GL_INFO_LOG_LENGTH, &log_len);
        if (log_len > 0) {
            GLchar* log_buf = (GLchar*) _sg_malloc((size_t)log_len);
            glGetShaderInfoLog(gl_shd, log_len, &log_len, log_buf);
            _SG_ERROR(GL_SHADER_COMPILATION_FAILED);
            _SG_LOGMSG(GL_SHADER_COMPILATION_FAILED, log_buf);
            _sg_free(log_buf);
        }
        glDeleteShader(gl_shd);
        gl_shd = 0;
    }
    _SG_GL_CHECK_ERROR();
    return gl_shd;
}

// NOTE: this is an out-of-range check for GLSL bindslots that's also active in release mode
_SOKOL_PRIVATE bool _sg_gl_ensure_glsl_bindslot_ranges(const sg_shader_desc* desc) {
    SOKOL_ASSERT(desc); _SOKOL_UNUSED(desc);
    #if defined(_SOKOL_GL_HAS_COMPUTE)
    SOKOL_ASSERT(_sg.limits.max_storage_buffer_bindings_per_stage <= _SG_GL_MAX_SBUF_BINDINGS);
    SOKOL_ASSERT(_sg.limits.max_storage_image_bindings_per_stage <= _SG_GL_MAX_SIMG_BINDINGS);
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const sg_shader_view* view = &desc->views[i];
        if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_buffer.glsl_binding_n >= _sg.limits.max_storage_buffer_bindings_per_stage) {
                _SG_ERROR(GL_STORAGEBUFFER_GLSL_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_image.glsl_binding_n >= _sg.limits.max_storage_image_bindings_per_stage) {
                _SG_ERROR(GL_STORAGEIMAGE_GLSL_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
    }
    #endif
    return true;
}

_SOKOL_PRIVATE sg_resource_state _sg_gl_create_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
    SOKOL_ASSERT(shd && desc);
    SOKOL_ASSERT(!shd->gl.prog);
    _SG_GL_CHECK_ERROR();
    // perform a fatal range-check on GLSL bindslots that's also active
    // in release mode to avoid potential out-of-bounds array accesses
    if (!_sg_gl_ensure_glsl_bindslot_ranges(desc)) {
        return SG_RESOURCESTATE_FAILED;
    }
    // copy the optional vertex attribute names over
    for (int i = 0; i < SG_MAX_VERTEX_ATTRIBUTES; i++) {
        _sg_strcpy(&shd->gl.attrs[i].name, desc->attrs[i].glsl_name);
    }
    const bool has_vs = desc->vertex_func.source;
    const bool has_fs = desc->fragment_func.source;
    const bool has_cs = desc->compute_func.source;
    SOKOL_ASSERT((has_vs && has_fs) || has_cs);
    GLuint gl_prog = glCreateProgram();
    if (has_vs && has_fs) {
        GLuint gl_vs = _sg_gl_compile_shader(SG_SHADERSTAGE_VERTEX, desc->vertex_func.source);
        GLuint gl_fs = _sg_gl_compile_shader(SG_SHADERSTAGE_FRAGMENT, desc->fragment_func.source);
        if (!(gl_vs && gl_fs)) {
            glDeleteProgram(gl_prog);
            if (gl_vs) { glDeleteShader(gl_vs); }
            if (gl_fs) { glDeleteShader(gl_fs); }
            return SG_RESOURCESTATE_FAILED;
        }
        glAttachShader(gl_prog, gl_vs);
        glAttachShader(gl_prog, gl_fs);
        glLinkProgram(gl_prog);
        glDeleteShader(gl_vs);
        glDeleteShader(gl_fs);
        _SG_GL_CHECK_ERROR();
    } else if (has_cs) {
        GLuint gl_cs = _sg_gl_compile_shader(SG_SHADERSTAGE_COMPUTE, desc->compute_func.source);
        if (!gl_cs) {
            glDeleteProgram(gl_prog);
            return SG_RESOURCESTATE_FAILED;
        }
        glAttachShader(gl_prog, gl_cs);
        glLinkProgram(gl_prog);
        glDeleteShader(gl_cs);
        _SG_GL_CHECK_ERROR();
    } else {
        SOKOL_UNREACHABLE;
    }
    GLint link_status;
    glGetProgramiv(gl_prog, GL_LINK_STATUS, &link_status);
    if (!link_status) {
        GLint log_len = 0;
        glGetProgramiv(gl_prog, GL_INFO_LOG_LENGTH, &log_len);
        if (log_len > 0) {
            GLchar* log_buf = (GLchar*) _sg_malloc((size_t)log_len);
            glGetProgramInfoLog(gl_prog, log_len, &log_len, log_buf);
            _SG_ERROR(GL_SHADER_LINKING_FAILED);
            _SG_LOGMSG(GL_SHADER_LINKING_FAILED, log_buf);
            _sg_free(log_buf);
        }
        glDeleteProgram(gl_prog);
        return SG_RESOURCESTATE_FAILED;
    }
    shd->gl.prog = gl_prog;
    // resolve uniforms
    _SG_GL_CHECK_ERROR();
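    // NOTE: the GL backend doesn't use GL uniform buffer objects, instead each
    // uniform block member is recorded with its glGetUniformLocation() result
    // and its byte offset into the flat uniform data blob, and is later
    // applied with individual glUniform*() calls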
  9707. for (size_t ub_index = 0; ub_index < SG_MAX_UNIFORMBLOCK_BINDSLOTS; ub_index++) {
  9708. const sg_shader_uniform_block* ub_desc = &desc->uniform_blocks[ub_index];
  9709. if (ub_desc->stage == SG_SHADERSTAGE_NONE) {
  9710. continue;
  9711. }
  9712. SOKOL_ASSERT(ub_desc->size > 0);
  9713. _sg_gl_uniform_block_t* ub = &shd->gl.uniform_blocks[ub_index];
  9714. SOKOL_ASSERT(ub->num_uniforms == 0);
  9715. uint32_t cur_uniform_offset = 0;
  9716. for (int u_index = 0; u_index < SG_MAX_UNIFORMBLOCK_MEMBERS; u_index++) {
  9717. const sg_glsl_shader_uniform* u_desc = &ub_desc->glsl_uniforms[u_index];
  9718. if (u_desc->type == SG_UNIFORMTYPE_INVALID) {
  9719. break;
  9720. }
  9721. const uint32_t u_align = _sg_uniform_alignment(u_desc->type, u_desc->array_count, ub_desc->layout);
  9722. const uint32_t u_size = _sg_uniform_size(u_desc->type, u_desc->array_count, ub_desc->layout);
  9723. cur_uniform_offset = _sg_align_u32(cur_uniform_offset, u_align);
  9724. _sg_gl_uniform_t* u = &ub->uniforms[u_index];
  9725. u->type = u_desc->type;
  9726. u->count = (uint16_t) u_desc->array_count;
  9727. u->offset = (uint16_t) cur_uniform_offset;
  9728. SOKOL_ASSERT(u_desc->glsl_name);
  9729. u->gl_loc = glGetUniformLocation(gl_prog, u_desc->glsl_name);
  9730. if (u->gl_loc == -1) {
  9731. _SG_WARN(GL_UNIFORMBLOCK_NAME_NOT_FOUND_IN_SHADER);
  9732. _SG_LOGMSG(GL_UNIFORMBLOCK_NAME_NOT_FOUND_IN_SHADER, u_desc->glsl_name);
  9733. }
  9734. cur_uniform_offset += u_size;
  9735. ub->num_uniforms++;
  9736. }
  9737. if (ub_desc->layout == SG_UNIFORMLAYOUT_STD140) {
  9738. cur_uniform_offset = _sg_align_u32(cur_uniform_offset, 16);
  9739. }
  9740. SOKOL_ASSERT(ub_desc->size == (size_t)cur_uniform_offset);
  9741. _SOKOL_UNUSED(cur_uniform_offset);
  9742. }
  9743. // copy resource bindslot mappings
  9744. for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
  9745. const sg_shader_view* view = &desc->views[i];
  9746. SOKOL_ASSERT(0 == shd->gl.sbuf_binding[i]);
  9747. SOKOL_ASSERT(0 == shd->gl.simg_binding[i]);
  9748. if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
  9749. shd->gl.sbuf_binding[i] = view->storage_buffer.glsl_binding_n;
  9750. } else if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
  9751. shd->gl.simg_binding[i] = view->storage_image.glsl_binding_n;
  9752. }
  9753. }
  9754. // record image sampler location in shader program
  9755. _SG_GL_CHECK_ERROR();
  9756. GLuint cur_prog = 0;
  9757. glGetIntegerv(GL_CURRENT_PROGRAM, (GLint*)&cur_prog);
  9758. glUseProgram(gl_prog);
  9759. GLint gl_tex_slot = 0;
  9760. for (size_t tex_smp_index = 0; tex_smp_index < SG_MAX_TEXTURE_SAMPLER_PAIRS; tex_smp_index++) {
  9761. const sg_shader_texture_sampler_pair* tex_smp_desc = &desc->texture_sampler_pairs[tex_smp_index];
  9762. if (tex_smp_desc->stage == SG_SHADERSTAGE_NONE) {
  9763. continue;
  9764. }
  9765. SOKOL_ASSERT(tex_smp_desc->glsl_name);
  9766. GLint gl_loc = glGetUniformLocation(gl_prog, tex_smp_desc->glsl_name);
  9767. if (gl_loc != -1) {
  9768. glUniform1i(gl_loc, gl_tex_slot);
  9769. shd->gl.tex_slot[tex_smp_index] = (int8_t)gl_tex_slot++;
  9770. } else {
  9771. shd->gl.tex_slot[tex_smp_index] = -1;
  9772. _SG_WARN(GL_IMAGE_SAMPLER_NAME_NOT_FOUND_IN_SHADER);
  9773. _SG_LOGMSG(GL_IMAGE_SAMPLER_NAME_NOT_FOUND_IN_SHADER, tex_smp_desc->glsl_name);
  9774. }
  9775. }
  9776. // it's legal to call glUseProgram with 0
  9777. glUseProgram(cur_prog);
  9778. _SG_GL_CHECK_ERROR();
  9779. return SG_RESOURCESTATE_VALID;
  9780. }
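// A worked example for the uniform offset computation above (illustrative
// only, the block and member names are made up): with SG_UNIFORMLAYOUT_STD140,
// a GLSL uniform block like
//
//      uniform vs_params {
//          mat4 mvp;       // offset 0,  size 64, align 16
//          vec2 offset0;   // offset 64, size 8,  align 8
//          float scale;    // offset 72, size 4,  align 4
//      };
//
// yields cur_uniform_offset = 0 -> 64 -> 72 -> 76, and the trailing std140
// 16-byte alignment rounds 76 up to 80. The matching sg_shader_desc must
// therefore declare .uniform_blocks[0].size = 80, otherwise the SOKOL_ASSERT
// above fires in debug mode.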
_SOKOL_PRIVATE void _sg_gl_discard_shader(_sg_shader_t* shd) {
    SOKOL_ASSERT(shd);
    _SG_GL_CHECK_ERROR();
    if (shd->gl.prog) {
        _sg_gl_cache_invalidate_program(shd->gl.prog);
        glDeleteProgram(shd->gl.prog);
    }
    _SG_GL_CHECK_ERROR();
}
_SOKOL_PRIVATE sg_resource_state _sg_gl_create_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(pip && desc);
    SOKOL_ASSERT(_sg.limits.max_vertex_attrs <= SG_MAX_VERTEX_ATTRIBUTES);
    if (pip->cmn.is_compute) {
        // shortcut for compute pipelines
        return SG_RESOURCESTATE_VALID;
    }
    pip->gl.primitive_type = desc->primitive_type;
    pip->gl.depth = desc->depth;
    pip->gl.stencil = desc->stencil;
    // FIXME: blend color and write mask per draw-buffer-attachment (requires GL4)
    pip->gl.blend = desc->colors[0].blend;
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        pip->gl.color_write_mask[i] = desc->colors[i].write_mask;
    }
    pip->gl.cull_mode = desc->cull_mode;
    pip->gl.face_winding = desc->face_winding;
    pip->gl.sample_count = desc->sample_count;
    pip->gl.alpha_to_coverage_enabled = desc->alpha_to_coverage_enabled;
    // NOTE: GLSL compilers may remove unused vertex attributes so we can't rely
    // on the 'prepopulated' vertex_buffer_layout_active[] state and need to
    // fill this array from scratch with the actual info after GLSL compilation
    for (int i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
        pip->cmn.vertex_buffer_layout_active[i] = false;
    }
    // resolve vertex attributes
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    SOKOL_ASSERT(shd->gl.prog);
    for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        pip->gl.attrs[attr_index].vb_index = -1;
    }
    for (int attr_index = 0; attr_index < _sg.limits.max_vertex_attrs; attr_index++) {
        const sg_vertex_attr_state* a_state = &desc->layout.attrs[attr_index];
        if (a_state->format == SG_VERTEXFORMAT_INVALID) {
            break;
        }
        SOKOL_ASSERT(a_state->buffer_index < SG_MAX_VERTEXBUFFER_BINDSLOTS);
        const sg_vertex_buffer_layout_state* l_state = &desc->layout.buffers[a_state->buffer_index];
        const sg_vertex_step step_func = l_state->step_func;
        const int step_rate = l_state->step_rate;
        GLint attr_loc = attr_index;
        if (!_sg_strempty(&shd->gl.attrs[attr_index].name)) {
            attr_loc = glGetAttribLocation(shd->gl.prog, _sg_strptr(&shd->gl.attrs[attr_index].name));
        }
        if (attr_loc != -1) {
            SOKOL_ASSERT(attr_loc < (GLint)_sg.limits.max_vertex_attrs);
            _sg_gl_attr_t* gl_attr = &pip->gl.attrs[attr_loc];
            SOKOL_ASSERT(gl_attr->vb_index == -1);
            gl_attr->vb_index = (int8_t) a_state->buffer_index;
            if (step_func == SG_VERTEXSTEP_PER_VERTEX) {
                gl_attr->divisor = 0;
            } else {
                gl_attr->divisor = (int8_t) step_rate;
            }
            SOKOL_ASSERT(l_state->stride > 0);
            gl_attr->stride = (uint8_t) l_state->stride;
            gl_attr->offset = a_state->offset;
            gl_attr->size = (uint8_t) _sg_gl_vertexformat_size(a_state->format);
            gl_attr->type = _sg_gl_vertexformat_type(a_state->format);
            gl_attr->normalized = _sg_gl_vertexformat_normalized(a_state->format);
            gl_attr->base_type = _sg_vertexformat_basetype(a_state->format);
            pip->cmn.vertex_buffer_layout_active[a_state->buffer_index] = true;
        } else {
            _SG_WARN(GL_VERTEX_ATTRIBUTE_NOT_FOUND_IN_SHADER);
            _SG_LOGMSG(GL_VERTEX_ATTRIBUTE_NOT_FOUND_IN_SHADER, _sg_strptr(&shd->gl.attrs[attr_index].name));
        }
    }
    return SG_RESOURCESTATE_VALID;
}
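// Hedged usage sketch for the attribute resolution above (the attribute
// names 'position' and 'color0' are hypothetical): if the shader was created
// with per-attribute GLSL names, those are resolved via glGetAttribLocation(),
// otherwise the layout index is used directly as attribute location:
//
//      sg_pipeline pip = sg_make_pipeline(&(sg_pipeline_desc){
//          .shader = shd,
//          .layout = {
//              .attrs = {
//                  [0] = { .format = SG_VERTEXFORMAT_FLOAT3 },  // 'position'
//                  [1] = { .format = SG_VERTEXFORMAT_UBYTE4N }, // 'color0'
//              },
//          },
//      });
//
// An attribute the GLSL compiler optimized away resolves to location -1 and
// only produces a warning, it doesn't fail pipeline creation.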
_SOKOL_PRIVATE void _sg_gl_discard_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    _sg_gl_cache_invalidate_pipeline(pip);
}
_SOKOL_PRIVATE void _sg_gl_fb_attach_texture(const _sg_view_t* view, GLenum gl_att_type) {
    const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
    const GLuint gl_tex = img->gl.tex[0];
    SOKOL_ASSERT(gl_tex);
    const GLuint gl_target = img->gl.target;
    SOKOL_ASSERT(gl_target);
    const int mip_level = view->cmn.img.mip_level;
    const int slice = view->cmn.img.slice;
    switch (img->cmn.type) {
        case SG_IMAGETYPE_2D:
            glFramebufferTexture2D(GL_FRAMEBUFFER, gl_att_type, gl_target, gl_tex, mip_level);
            break;
        case SG_IMAGETYPE_CUBE:
            glFramebufferTexture2D(GL_FRAMEBUFFER, gl_att_type, _sg_gl_cubeface_target(slice), gl_tex, mip_level);
            break;
        default:
            glFramebufferTextureLayer(GL_FRAMEBUFFER, gl_att_type, gl_tex, mip_level, slice);
            break;
    }
}
_SOKOL_PRIVATE GLenum _sg_gl_depth_stencil_attachment_type(const _sg_image_t* ds_img) {
    if (_sg_is_depth_stencil_format(ds_img->cmn.pixel_format)) {
        return GL_DEPTH_STENCIL_ATTACHMENT;
    } else {
        return GL_DEPTH_ATTACHMENT;
    }
}
_SOKOL_PRIVATE bool _sg_gl_check_framebuffer_status(void) {
    const GLenum fb_status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if (fb_status != GL_FRAMEBUFFER_COMPLETE) {
        switch (fb_status) {
            case GL_FRAMEBUFFER_UNDEFINED:
                _SG_ERROR(GL_FRAMEBUFFER_STATUS_UNDEFINED);
                break;
            case GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT:
                _SG_ERROR(GL_FRAMEBUFFER_STATUS_INCOMPLETE_ATTACHMENT);
                break;
            case GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT:
                _SG_ERROR(GL_FRAMEBUFFER_STATUS_INCOMPLETE_MISSING_ATTACHMENT);
                break;
            case GL_FRAMEBUFFER_UNSUPPORTED:
                _SG_ERROR(GL_FRAMEBUFFER_STATUS_UNSUPPORTED);
                break;
            case GL_FRAMEBUFFER_INCOMPLETE_MULTISAMPLE:
                _SG_ERROR(GL_FRAMEBUFFER_STATUS_INCOMPLETE_MULTISAMPLE);
                break;
            default:
                _SG_ERROR(GL_FRAMEBUFFER_STATUS_UNKNOWN);
                break;
        }
        return false;
    }
    return true;
}
_SOKOL_PRIVATE sg_resource_state _sg_gl_create_view(_sg_view_t* view, const sg_view_desc* desc) {
    SOKOL_ASSERT(view && desc);
    _SOKOL_UNUSED(desc);
    _SG_GL_CHECK_ERROR();
    if (view->cmn.type == SG_VIEWTYPE_TEXTURE) {
        #if defined(_SOKOL_GL_HAS_TEXVIEWS)
        if (_sg.features.gl_texture_views) {
            const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
            for (int slot = 0; slot < img->cmn.num_slots; slot++) {
                SOKOL_ASSERT(img->gl.tex[slot] != 0);
                const GLuint min_level = (GLuint)view->cmn.img.mip_level;
                const GLuint num_levels = (GLuint)view->cmn.img.mip_level_count;
                const GLuint min_layer = (GLuint)view->cmn.img.slice;
                const GLuint num_layers = (GLuint)view->cmn.img.slice_count;
                const GLenum ifmt = _sg_gl_teximage_internal_format(img->cmn.pixel_format);
                glGenTextures(1, &view->gl.tex_view[slot]);
                glTextureView(view->gl.tex_view[slot], img->gl.target, img->gl.tex[slot], ifmt, min_level, num_levels, min_layer, num_layers);
            }
        }
        #endif
    } else if ((view->cmn.type == SG_VIEWTYPE_COLORATTACHMENT) || (view->cmn.type == SG_VIEWTYPE_DEPTHSTENCILATTACHMENT)) {
        // create MSAA render buffer if MSAA textures are not supported
        const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
        const bool msaa = img->cmn.sample_count > 1;
        if (msaa && !_sg.features.msaa_texture_bindings) {
            const GLenum gl_internal_format = _sg_gl_teximage_internal_format(img->cmn.pixel_format);
            glGenRenderbuffers(1, &view->gl.msaa_render_buffer);
            glBindRenderbuffer(GL_RENDERBUFFER, view->gl.msaa_render_buffer);
            glRenderbufferStorageMultisample(GL_RENDERBUFFER, img->cmn.sample_count, gl_internal_format, img->cmn.width, img->cmn.height);
        }
    } else if (view->cmn.type == SG_VIEWTYPE_RESOLVEATTACHMENT) {
        // store current framebuffer binding (restored at end of block)
        GLuint gl_orig_fb;
        glGetIntegerv(GL_FRAMEBUFFER_BINDING, (GLint*)&gl_orig_fb);
        // create MSAA resolve framebuffer
        glGenFramebuffers(1, &view->gl.msaa_resolve_frame_buffer);
        glBindFramebuffer(GL_FRAMEBUFFER, view->gl.msaa_resolve_frame_buffer);
        _sg_gl_fb_attach_texture(view, GL_COLOR_ATTACHMENT0);
        if (!_sg_gl_check_framebuffer_status()) {
            return SG_RESOURCESTATE_FAILED;
        }
        // setup color attachments for the framebuffer
        static const GLenum gl_draw_buf = GL_COLOR_ATTACHMENT0;
        glDrawBuffers(1, &gl_draw_buf);
        // bind original framebuffer
        glBindFramebuffer(GL_FRAMEBUFFER, gl_orig_fb);
    }
    _SG_GL_CHECK_ERROR();
    return SG_RESOURCESTATE_VALID;
}
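// NOTE on the glTextureView() call above: the view texture re-interprets a
// sub-range of the parent texture's mips and slices. With hypothetical values
// .mip_level = 1, .mip_level_count = 2, .slice = 0, .slice_count = 1 on a
// 2D-array image, the call expands to:
//
//      glTextureView(view_tex, GL_TEXTURE_2D_ARRAY, parent_tex, ifmt,
//                    1,   // minlevel: first mip visible through the view
//                    2,   // numlevels
//                    0,   // minlayer: first array layer
//                    1);  // numlayers
//
// On GL versions without texture view support the backend instead binds the
// parent texture directly (see _sg_gl_apply_bindings()).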
_SOKOL_PRIVATE void _sg_gl_discard_view(_sg_view_t* view) {
    SOKOL_ASSERT(view);
    _SG_GL_CHECK_ERROR();
    for (size_t slot = 0; slot < SG_NUM_INFLIGHT_FRAMES; slot++) {
        if (0 != view->gl.tex_view[slot]) {
            // NOTE: cache invalidation also works as expected without
            // GL texture view support, in that case the view's texture object
            // will simply remain bound until the sg_image object is discarded
            _sg_gl_cache_invalidate_texture_sampler(view->gl.tex_view[slot], 0);
            glDeleteTextures(1, &view->gl.tex_view[slot]);
        }
    }
    if (view->gl.msaa_render_buffer) {
        glDeleteRenderbuffers(1, &view->gl.msaa_render_buffer);
    }
    if (view->gl.msaa_resolve_frame_buffer) {
        glDeleteFramebuffers(1, &view->gl.msaa_resolve_frame_buffer);
    }
    _SG_GL_CHECK_ERROR();
}
#if defined(_SOKOL_GL_HAS_COMPUTE)
_SOKOL_PRIVATE void _sg_gl_handle_memory_barriers(const _sg_shader_t* shd, const _sg_bindings_ptrs_t* bnd, const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT((shd && bnd && atts == 0) || (atts && shd == 0 && bnd == 0));
    if (!_sg.features.compute) {
        return;
    }
    GLbitfield gl_barrier_bits = 0;
    // if vertex-, index- or storage-buffer bindings have been written
    // by a compute shader before, a barrier must be issued
    if (bnd) {
        for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
            _sg_buffer_t* buf = bnd->vbs[i];
            if (!buf) {
                continue;
            }
            if (buf->gl.gpu_dirty_flags & _SG_GL_GPUDIRTY_VERTEXBUFFER) {
                gl_barrier_bits |= GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT;
                buf->gl.gpu_dirty_flags &= (uint8_t)~_SG_GL_GPUDIRTY_VERTEXBUFFER;
            }
        }
        if (bnd->ib) {
            _sg_buffer_t* buf = bnd->ib;
            if (buf->gl.gpu_dirty_flags & _SG_GL_GPUDIRTY_INDEXBUFFER) {
                gl_barrier_bits |= GL_ELEMENT_ARRAY_BARRIER_BIT;
                buf->gl.gpu_dirty_flags &= (uint8_t)~_SG_GL_GPUDIRTY_INDEXBUFFER;
            }
        }
        for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
            const _sg_view_t* view = bnd->views[i];
            if (!view) {
                continue;
            }
            if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
                _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
                if (buf->gl.gpu_dirty_flags & _SG_GL_GPUDIRTY_STORAGEBUFFER) {
                    gl_barrier_bits |= GL_SHADER_STORAGE_BARRIER_BIT;
                    buf->gl.gpu_dirty_flags &= (uint8_t)~_SG_GL_GPUDIRTY_STORAGEBUFFER;
                }
            } else if (view->cmn.type == SG_VIEWTYPE_TEXTURE) {
                _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                if (img->gl.gpu_dirty_flags & _SG_GL_GPUDIRTY_TEXTURE) {
                    gl_barrier_bits |= GL_TEXTURE_FETCH_BARRIER_BIT;
                    img->gl.gpu_dirty_flags &= (uint8_t)~_SG_GL_GPUDIRTY_TEXTURE;
                }
            } else if (view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE) {
                _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                if (img->gl.gpu_dirty_flags & _SG_GL_GPUDIRTY_STORAGEIMAGE) {
                    gl_barrier_bits |= GL_SHADER_IMAGE_ACCESS_BARRIER_BIT;
                    img->gl.gpu_dirty_flags &= (uint8_t)~_SG_GL_GPUDIRTY_STORAGEIMAGE;
                }
            } else {
                SOKOL_UNREACHABLE;
            }
        }
    }
    if (atts) {
        for (int i = 0; i < atts->num_color_views; i++) {
            const _sg_view_t* view = atts->color_views[i];
            SOKOL_ASSERT(view);
            _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
            if (img->gl.gpu_dirty_flags & _SG_GL_GPUDIRTY_ATTACHMENT) {
                gl_barrier_bits |= GL_FRAMEBUFFER_BARRIER_BIT;
                img->gl.gpu_dirty_flags &= (uint8_t)~_SG_GL_GPUDIRTY_ATTACHMENT;
            }
        }
    }
    if (0 != gl_barrier_bits) {
        glMemoryBarrier(gl_barrier_bits);
        _sg_stats_inc(gl.num_memory_barriers);
    }
    // mark resources as dirty which will be written by compute shaders
    // (don't merge this into the above loop, this would mess up the
    // dirty flags if the same resource is bound multiple times)
    if (bnd) {
        for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
            const _sg_view_t* view = bnd->views[i];
            if (!view) {
                continue;
            }
            if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
                if (!shd->cmn.views[i].sbuf_readonly) {
                    _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
                    buf->gl.gpu_dirty_flags = _SG_GL_GPUDIRTY_BUFFER_ALL;
                }
            } else if (view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE) {
                // NOTE: storage image bindings are always written, otherwise
                // they would be texture bindings!
                _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                img->gl.gpu_dirty_flags = _SG_GL_GPUDIRTY_IMAGE_ALL;
            }
        }
    }
}
#endif
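// The two-phase barrier scheme above in a nutshell (a hypothetical frame,
// assuming a compute pass writes a storage buffer which a following render
// pass consumes as vertex buffer):
//
//      compute pass:   buffer bound as writable storage buffer
//                      => phase 2 marks it with _SG_GL_GPUDIRTY_BUFFER_ALL
//      render pass:    same buffer bound as vertex buffer
//                      => phase 1 sees _SG_GL_GPUDIRTY_VERTEXBUFFER, issues
//                         glMemoryBarrier(GL_VERTEX_ATTRIB_ARRAY_BARRIER_BIT)
//                         and clears only that one flag, so a later index- or
//                         storage-buffer binding of the same buffer still
//                         triggers its own barrier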
_SOKOL_PRIVATE void _sg_gl_begin_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(pass && atts);
    _SG_GL_CHECK_ERROR();
    // early out if this is a compute pass
    if (pass->compute) {
        return;
    }
    const sg_swapchain* swapchain = &pass->swapchain;
    const sg_pass_action* action = &pass->action;
    const bool is_swapchain_pass = atts->empty;
    const bool is_offscreen_pass = !atts->empty;
    // bind the render pass framebuffer
    //
    // FIXME: Disabling sRGB conversion for the default framebuffer is
    // a crude hack to make the behaviour of sRGB render target textures
    // identical with the Metal and D3D11 swapchains created by sokol-app.
    //
    // This will need a cleaner solution (e.g. allowing to configure
    // sokol_app.h with an sRGB or RGB framebuffer).
    if (is_offscreen_pass) {
        // offscreen pass, mutate the global offscreen framebuffer object
        #if defined(SOKOL_GLCORE)
        glEnable(GL_FRAMEBUFFER_SRGB);
        #endif
        glBindFramebuffer(GL_FRAMEBUFFER, _sg.gl.fb);
        for (int i = 0; i < atts->num_color_views; i++) {
            const _sg_view_t* view = atts->color_views[i];
            const GLenum gl_att_type = (GLenum)(GL_COLOR_ATTACHMENT0 + i);
            if (view->gl.msaa_render_buffer) {
                glFramebufferRenderbuffer(GL_FRAMEBUFFER, gl_att_type, GL_RENDERBUFFER, view->gl.msaa_render_buffer);
            } else {
                _sg_gl_fb_attach_texture(view, gl_att_type);
            }
        }
        // explicitly detach unused color attachments
        for (int i = atts->num_color_views; i < _sg.limits.max_color_attachments; i++) {
            const GLenum gl_att_type = (GLenum)(GL_COLOR_ATTACHMENT0 + i);
            glFramebufferRenderbuffer(GL_FRAMEBUFFER, gl_att_type, GL_RENDERBUFFER, 0);
            glFramebufferTexture2D(GL_FRAMEBUFFER, gl_att_type, GL_TEXTURE_2D, 0, 0);
        }
        if (atts->ds_view) {
            const _sg_view_t* view = atts->ds_view;
            const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
            const GLenum gl_att_type = _sg_gl_depth_stencil_attachment_type(img);
            if (view->gl.msaa_render_buffer) {
                glFramebufferRenderbuffer(GL_FRAMEBUFFER, gl_att_type, GL_RENDERBUFFER, view->gl.msaa_render_buffer);
            } else {
                _sg_gl_fb_attach_texture(view, gl_att_type);
            }
        } else {
            // explicitly detach depth-stencil attachment if not used in this pass
            glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, 0);
            glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_TEXTURE_2D, 0, 0);
        }
        if (!_sg_gl_check_framebuffer_status()) {
            _sg.cur_pass.valid = false;
            return;
        }
        GLenum gl_draw_bufs[SG_MAX_COLOR_ATTACHMENTS];
        SOKOL_ASSERT(_sg.limits.max_color_attachments <= SG_MAX_COLOR_ATTACHMENTS);
        for (int i = 0; i < _sg.limits.max_color_attachments; i++) {
            if (i < atts->num_color_views) {
                gl_draw_bufs[i] = (GLenum)(GL_COLOR_ATTACHMENT0 + i);
            } else {
                gl_draw_bufs[i] = GL_NONE;
            }
        }
        glDrawBuffers(_sg.limits.max_color_attachments, gl_draw_bufs);
        #if defined(_SOKOL_GL_HAS_COMPUTE)
        _sg_gl_handle_memory_barriers(0, 0, atts);
        _SG_GL_CHECK_ERROR();
        #endif
    } else {
        // swapchain pass
        #if defined(SOKOL_GLCORE)
        glDisable(GL_FRAMEBUFFER_SRGB);
        #endif
        // NOTE: on some platforms, the default framebuffer of a context
        // is null, so we can't actually assert here that the
        // framebuffer has been provided
        glBindFramebuffer(GL_FRAMEBUFFER, swapchain->gl.framebuffer);
    }
    glViewport(0, 0, _sg.cur_pass.dim.width, _sg.cur_pass.dim.height);
    glScissor(0, 0, _sg.cur_pass.dim.width, _sg.cur_pass.dim.height);
    // number of color attachments
    const int num_color_atts = is_offscreen_pass ? atts->num_color_views : 1;
    // clear color and depth-stencil attachments if needed
    bool clear_any_color = false;
    for (int i = 0; i < num_color_atts; i++) {
        if (SG_LOADACTION_CLEAR == action->colors[i].load_action) {
            clear_any_color = true;
            break;
        }
    }
    const bool clear_depth = (action->depth.load_action == SG_LOADACTION_CLEAR);
    const bool clear_stencil = (action->stencil.load_action == SG_LOADACTION_CLEAR);
    bool need_pip_cache_flush = false;
    if (clear_any_color) {
        bool need_color_mask_flush = false;
        // NOTE: not a bug to iterate over all possible color attachments
        for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
            if (SG_COLORMASK_RGBA != _sg.gl.cache.color_write_mask[i]) {
                need_pip_cache_flush = true;
                need_color_mask_flush = true;
                _sg.gl.cache.color_write_mask[i] = SG_COLORMASK_RGBA;
            }
        }
        if (need_color_mask_flush) {
            glColorMask(GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
        }
    }
    if (clear_depth) {
        if (!_sg.gl.cache.depth.write_enabled) {
            need_pip_cache_flush = true;
            _sg.gl.cache.depth.write_enabled = true;
            glDepthMask(GL_TRUE);
        }
        if (_sg.gl.cache.depth.compare != SG_COMPAREFUNC_ALWAYS) {
            need_pip_cache_flush = true;
            _sg.gl.cache.depth.compare = SG_COMPAREFUNC_ALWAYS;
            glDepthFunc(GL_ALWAYS);
        }
    }
    if (clear_stencil) {
        if (_sg.gl.cache.stencil.write_mask != 0xFF) {
            need_pip_cache_flush = true;
            _sg.gl.cache.stencil.write_mask = 0xFF;
            glStencilMask(0xFF);
        }
    }
    if (need_pip_cache_flush) {
        // we messed with the state cache directly, need to clear cached
        // pipeline to force re-evaluation in next sg_apply_pipeline()
        _sg.gl.cache.cur_pip = _sg_sref(0);
    }
    for (int i = 0; i < num_color_atts; i++) {
        if (action->colors[i].load_action == SG_LOADACTION_CLEAR) {
            glClearBufferfv(GL_COLOR, i, &action->colors[i].clear_value.r);
        }
    }
    if (is_swapchain_pass || atts->ds_view) {
        if (clear_depth && clear_stencil) {
            glClearBufferfi(GL_DEPTH_STENCIL, 0, action->depth.clear_value, action->stencil.clear_value);
        } else if (clear_depth) {
            glClearBufferfv(GL_DEPTH, 0, &action->depth.clear_value);
        } else if (clear_stencil) {
            GLint val = (GLint) action->stencil.clear_value;
            glClearBufferiv(GL_STENCIL, 0, &val);
        }
    }
    // keep store actions for end-pass
    for (int i = 0; i < SG_MAX_COLOR_ATTACHMENTS; i++) {
        _sg.gl.color_store_actions[i] = action->colors[i].store_action;
    }
    _sg.gl.depth_store_action = action->depth.store_action;
    _sg.gl.stencil_store_action = action->stencil.store_action;
    _SG_GL_CHECK_ERROR();
}
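// Hedged usage sketch for the clear handling above (the clear values are
// made up, the swapchain is assumed to come from a windowing glue library
// like sokol_glue.h):
//
//      sg_begin_pass(&(sg_pass){
//          .action = {
//              .colors[0] = {
//                  .load_action = SG_LOADACTION_CLEAR,
//                  .clear_value = { 0.2f, 0.3f, 0.4f, 1.0f },
//              },
//              .depth = { .load_action = SG_LOADACTION_CLEAR, .clear_value = 1.0f },
//          },
//          .swapchain = sglue_swapchain(),
//      });
//
// ...first forces the color/depth/stencil write masks open (flushing the
// cached pipeline if needed), then clears via glClearBufferfv/fi/iv.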
_SOKOL_PRIVATE void _sg_gl_end_render_pass(const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(atts);
    if (!atts->empty) {
        bool fb_read_bound = false;
        bool fb_draw_bound = false;
        const int num_color_atts = atts->num_color_views;
        for (int i = 0; i < num_color_atts; i++) {
            // perform MSAA resolve if needed
            const _sg_view_t* rsv_view = atts->resolve_views[i];
            if (rsv_view && rsv_view->gl.msaa_resolve_frame_buffer) {
                if (!fb_read_bound) {
                    glBindFramebuffer(GL_READ_FRAMEBUFFER, _sg.gl.fb);
                    fb_read_bound = true;
                }
                const _sg_image_t* rsv_img = _sg_image_ref_ptr(&rsv_view->cmn.img.ref);
                const int w = rsv_img->cmn.width;
                const int h = rsv_img->cmn.height;
                glBindFramebuffer(GL_DRAW_FRAMEBUFFER, rsv_view->gl.msaa_resolve_frame_buffer);
                glReadBuffer((GLenum)(GL_COLOR_ATTACHMENT0 + i));
                glBlitFramebuffer(0, 0, w, h, 0, 0, w, h, GL_COLOR_BUFFER_BIT, GL_NEAREST);
                fb_draw_bound = true;
            }
        }
        // invalidate framebuffers
        _SOKOL_UNUSED(fb_draw_bound);
        #if defined(SOKOL_GLES3)
        // need to restore framebuffer binding before invalidate if the MSAA resolve had changed the binding
        if (fb_draw_bound) {
            glBindFramebuffer(GL_FRAMEBUFFER, _sg.gl.fb);
        }
        GLenum invalidate_atts[SG_MAX_COLOR_ATTACHMENTS + 2] = { 0 };
        int att_index = 0;
        for (int i = 0; i < num_color_atts; i++) {
            if (_sg.gl.color_store_actions[i] == SG_STOREACTION_DONTCARE) {
                invalidate_atts[att_index++] = (GLenum)(GL_COLOR_ATTACHMENT0 + i);
            }
        }
        if (atts->ds_view) {
            if (_sg.gl.depth_store_action == SG_STOREACTION_DONTCARE) {
                invalidate_atts[att_index++] = GL_DEPTH_ATTACHMENT;
            }
            if (_sg.gl.stencil_store_action == SG_STOREACTION_DONTCARE) {
                invalidate_atts[att_index++] = GL_STENCIL_ATTACHMENT;
            }
        }
        if (att_index > 0) {
            glInvalidateFramebuffer(GL_DRAW_FRAMEBUFFER, att_index, invalidate_atts);
        }
        #endif
    }
}
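// Hedged sketch: the GLES3 invalidate path above is what makes
//
//      .colors[0].store_action = SG_STOREACTION_DONTCARE
//
// useful on tiler GPUs: glInvalidateFramebuffer() tells the driver that the
// attachment content doesn't need to be written back to memory after the
// pass, the common case for an MSAA attachment once it has been resolved
// into its resolve attachment.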
_SOKOL_PRIVATE void _sg_gl_end_pass(const _sg_attachments_ptrs_t* atts) {
    _SG_GL_CHECK_ERROR();
    if (!_sg.cur_pass.is_compute) {
        _sg_gl_end_render_pass(atts);
    }
    _SG_GL_CHECK_ERROR();
}
_SOKOL_PRIVATE void _sg_gl_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
    y = origin_top_left ? (_sg.cur_pass.dim.height - (y+h)) : y;
    glViewport(x, y, w, h);
}
_SOKOL_PRIVATE void _sg_gl_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
    y = origin_top_left ? (_sg.cur_pass.dim.height - (y+h)) : y;
    glScissor(x, y, w, h);
}
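// Worked example for the y-flip above (hypothetical numbers): with a pass
// height of 600 and origin_top_left=true, a rect with y=10 and h=100 becomes
// y = 600 - (10 + 100) = 490 in GL's bottom-left coordinate convention.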
_SOKOL_PRIVATE void _sg_gl_apply_render_pipeline_state(_sg_pipeline_t* pip) {
    // update render pipeline state
    _sg.gl.cache.cur_primitive_type = _sg_gl_primitive_type(pip->gl.primitive_type);
    _sg.gl.cache.cur_index_type = _sg_gl_index_type(pip->cmn.index_type);
    // update depth state
    {
        const sg_depth_state* state_ds = &pip->gl.depth;
        sg_depth_state* cache_ds = &_sg.gl.cache.depth;
        if (state_ds->compare != cache_ds->compare) {
            cache_ds->compare = state_ds->compare;
            glDepthFunc(_sg_gl_compare_func(state_ds->compare));
            _sg_stats_inc(gl.num_render_state);
        }
        if (state_ds->write_enabled != cache_ds->write_enabled) {
            cache_ds->write_enabled = state_ds->write_enabled;
            glDepthMask(state_ds->write_enabled);
            _sg_stats_inc(gl.num_render_state);
        }
        if (!_sg_fequal(state_ds->bias, cache_ds->bias, 0.000001f) ||
            !_sg_fequal(state_ds->bias_slope_scale, cache_ds->bias_slope_scale, 0.000001f))
        {
            /* according to ANGLE's D3D11 backend:
                D3D11 SlopeScaledDepthBias ==> GL polygonOffsetFactor
                D3D11 DepthBias ==> GL polygonOffsetUnits
                DepthBiasClamp has no meaning on GL
            */
            cache_ds->bias = state_ds->bias;
            cache_ds->bias_slope_scale = state_ds->bias_slope_scale;
            glPolygonOffset(state_ds->bias_slope_scale, state_ds->bias);
            _sg_stats_inc(gl.num_render_state);
            bool po_enabled = true;
            if (_sg_fequal(state_ds->bias, 0.0f, 0.000001f) &&
                _sg_fequal(state_ds->bias_slope_scale, 0.0f, 0.000001f))
            {
                po_enabled = false;
            }
            if (po_enabled != _sg.gl.cache.polygon_offset_enabled) {
                _sg.gl.cache.polygon_offset_enabled = po_enabled;
                if (po_enabled) {
                    glEnable(GL_POLYGON_OFFSET_FILL);
                } else {
                    glDisable(GL_POLYGON_OFFSET_FILL);
                }
                _sg_stats_inc(gl.num_render_state);
            }
        }
    }
    // update stencil state
    {
        const sg_stencil_state* state_ss = &pip->gl.stencil;
        sg_stencil_state* cache_ss = &_sg.gl.cache.stencil;
        if (state_ss->enabled != cache_ss->enabled) {
            cache_ss->enabled = state_ss->enabled;
            if (state_ss->enabled) {
                glEnable(GL_STENCIL_TEST);
            } else {
                glDisable(GL_STENCIL_TEST);
            }
            _sg_stats_inc(gl.num_render_state);
        }
        if (state_ss->write_mask != cache_ss->write_mask) {
            cache_ss->write_mask = state_ss->write_mask;
            glStencilMask(state_ss->write_mask);
            _sg_stats_inc(gl.num_render_state);
        }
        for (int i = 0; i < 2; i++) {
            const sg_stencil_face_state* state_sfs = (i==0)? &state_ss->front : &state_ss->back;
            sg_stencil_face_state* cache_sfs = (i==0)? &cache_ss->front : &cache_ss->back;
            GLenum gl_face = (i==0)? GL_FRONT : GL_BACK;
            if ((state_sfs->compare != cache_sfs->compare) ||
                (state_ss->read_mask != cache_ss->read_mask) ||
                (state_ss->ref != cache_ss->ref))
            {
                cache_sfs->compare = state_sfs->compare;
                glStencilFuncSeparate(gl_face,
                    _sg_gl_compare_func(state_sfs->compare),
                    state_ss->ref,
                    state_ss->read_mask);
                _sg_stats_inc(gl.num_render_state);
            }
            if ((state_sfs->fail_op != cache_sfs->fail_op) ||
                (state_sfs->depth_fail_op != cache_sfs->depth_fail_op) ||
                (state_sfs->pass_op != cache_sfs->pass_op))
            {
                cache_sfs->fail_op = state_sfs->fail_op;
                cache_sfs->depth_fail_op = state_sfs->depth_fail_op;
                cache_sfs->pass_op = state_sfs->pass_op;
                glStencilOpSeparate(gl_face,
                    _sg_gl_stencil_op(state_sfs->fail_op),
                    _sg_gl_stencil_op(state_sfs->depth_fail_op),
                    _sg_gl_stencil_op(state_sfs->pass_op));
                _sg_stats_inc(gl.num_render_state);
            }
        }
        cache_ss->read_mask = state_ss->read_mask;
        cache_ss->ref = state_ss->ref;
    }
    if (pip->cmn.color_count > 0) {
        // update blend state
        // FIXME: separate blend state per color attachment
        const sg_blend_state* state_bs = &pip->gl.blend;
        sg_blend_state* cache_bs = &_sg.gl.cache.blend;
        if (state_bs->enabled != cache_bs->enabled) {
            cache_bs->enabled = state_bs->enabled;
            if (state_bs->enabled) {
                glEnable(GL_BLEND);
            } else {
                glDisable(GL_BLEND);
            }
            _sg_stats_inc(gl.num_render_state);
        }
        if ((state_bs->src_factor_rgb != cache_bs->src_factor_rgb) ||
            (state_bs->dst_factor_rgb != cache_bs->dst_factor_rgb) ||
            (state_bs->src_factor_alpha != cache_bs->src_factor_alpha) ||
            (state_bs->dst_factor_alpha != cache_bs->dst_factor_alpha))
        {
            cache_bs->src_factor_rgb = state_bs->src_factor_rgb;
            cache_bs->dst_factor_rgb = state_bs->dst_factor_rgb;
            cache_bs->src_factor_alpha = state_bs->src_factor_alpha;
            cache_bs->dst_factor_alpha = state_bs->dst_factor_alpha;
            glBlendFuncSeparate(_sg_gl_blend_factor(state_bs->src_factor_rgb),
                _sg_gl_blend_factor(state_bs->dst_factor_rgb),
                _sg_gl_blend_factor(state_bs->src_factor_alpha),
                _sg_gl_blend_factor(state_bs->dst_factor_alpha));
            _sg_stats_inc(gl.num_render_state);
        }
        if ((state_bs->op_rgb != cache_bs->op_rgb) || (state_bs->op_alpha != cache_bs->op_alpha)) {
            cache_bs->op_rgb = state_bs->op_rgb;
            cache_bs->op_alpha = state_bs->op_alpha;
            glBlendEquationSeparate(_sg_gl_blend_op(state_bs->op_rgb), _sg_gl_blend_op(state_bs->op_alpha));
            _sg_stats_inc(gl.num_render_state);
        }
        // standalone color target state
        for (GLuint i = 0; i < (GLuint)pip->cmn.color_count; i++) {
            if (pip->gl.color_write_mask[i] != _sg.gl.cache.color_write_mask[i]) {
                const sg_color_mask cm = pip->gl.color_write_mask[i];
                _sg.gl.cache.color_write_mask[i] = cm;
                #ifdef SOKOL_GLCORE
                glColorMaski(i,
                    (cm & SG_COLORMASK_R) != 0,
                    (cm & SG_COLORMASK_G) != 0,
                    (cm & SG_COLORMASK_B) != 0,
                    (cm & SG_COLORMASK_A) != 0);
                #else
                if (0 == i) {
                    glColorMask((cm & SG_COLORMASK_R) != 0,
                        (cm & SG_COLORMASK_G) != 0,
                        (cm & SG_COLORMASK_B) != 0,
                        (cm & SG_COLORMASK_A) != 0);
                }
                #endif
                _sg_stats_inc(gl.num_render_state);
            }
        }
        if (!_sg_fequal(pip->cmn.blend_color.r, _sg.gl.cache.blend_color.r, 0.0001f) ||
            !_sg_fequal(pip->cmn.blend_color.g, _sg.gl.cache.blend_color.g, 0.0001f) ||
            !_sg_fequal(pip->cmn.blend_color.b, _sg.gl.cache.blend_color.b, 0.0001f) ||
            !_sg_fequal(pip->cmn.blend_color.a, _sg.gl.cache.blend_color.a, 0.0001f))
        {
            sg_color c = pip->cmn.blend_color;
            _sg.gl.cache.blend_color = c;
            glBlendColor(c.r, c.g, c.b, c.a);
            _sg_stats_inc(gl.num_render_state);
        }
    } // pip->cmn.color_count > 0
    if (pip->gl.cull_mode != _sg.gl.cache.cull_mode) {
        _sg.gl.cache.cull_mode = pip->gl.cull_mode;
        if (SG_CULLMODE_NONE == pip->gl.cull_mode) {
            glDisable(GL_CULL_FACE);
            _sg_stats_inc(gl.num_render_state);
        } else {
            glEnable(GL_CULL_FACE);
            GLenum gl_mode = (SG_CULLMODE_FRONT == pip->gl.cull_mode) ? GL_FRONT : GL_BACK;
            glCullFace(gl_mode);
            _sg_stats_add(gl.num_render_state, 2);
        }
    }
    if (pip->gl.face_winding != _sg.gl.cache.face_winding) {
        _sg.gl.cache.face_winding = pip->gl.face_winding;
        GLenum gl_winding = (SG_FACEWINDING_CW == pip->gl.face_winding) ? GL_CW : GL_CCW;
        glFrontFace(gl_winding);
        _sg_stats_inc(gl.num_render_state);
    }
    if (pip->gl.alpha_to_coverage_enabled != _sg.gl.cache.alpha_to_coverage_enabled) {
        _sg.gl.cache.alpha_to_coverage_enabled = pip->gl.alpha_to_coverage_enabled;
        if (pip->gl.alpha_to_coverage_enabled) {
            glEnable(GL_SAMPLE_ALPHA_TO_COVERAGE);
        } else {
            glDisable(GL_SAMPLE_ALPHA_TO_COVERAGE);
        }
        _sg_stats_inc(gl.num_render_state);
    }
    #ifdef SOKOL_GLCORE
    if (pip->gl.sample_count != _sg.gl.cache.sample_count) {
        _sg.gl.cache.sample_count = pip->gl.sample_count;
        if (pip->gl.sample_count > 1) {
            glEnable(GL_MULTISAMPLE);
        } else {
            glDisable(GL_MULTISAMPLE);
        }
        _sg_stats_inc(gl.num_render_state);
    }
    #endif
}
_SOKOL_PRIVATE void _sg_gl_apply_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    _SG_GL_CHECK_ERROR();
    if (!_sg_sref_slot_eql(&_sg.gl.cache.cur_pip, &pip->slot)) {
        _sg.gl.cache.cur_pip = _sg_sref(&pip->slot);
        // bind shader program
        const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
        if (shd->gl.prog != _sg.gl.cache.prog) {
            _sg.gl.cache.prog = shd->gl.prog;
            glUseProgram(shd->gl.prog);
            _sg_stats_inc(gl.num_use_program);
        }
        if (!pip->cmn.is_compute) {
            _sg_gl_apply_render_pipeline_state(pip);
        }
    }
    _SG_GL_CHECK_ERROR();
}
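// The redundant-state-elimination pattern used throughout
// _sg_gl_apply_render_pipeline_state(), shown in isolation (a minimal
// sketch, not additional backend code):
//
//      if (state->compare != cache->compare) {   // only touch GL on change
//          cache->compare = state->compare;      // update the shadow state
//          glDepthFunc(_sg_gl_compare_func(state->compare));
//          _sg_stats_inc(gl.num_render_state);   // count actual GL calls
//      }
//
// This is also why _sg_gl_begin_pass() resets _sg.gl.cache.cur_pip after
// changing write masks for the clear: the next sg_apply_pipeline() must not
// be skipped by the 'same pipeline' early-out.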
_SOKOL_PRIVATE bool _sg_gl_apply_bindings(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(bnd);
    SOKOL_ASSERT(bnd->pip);
    _SG_GL_CHECK_ERROR();
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd->pip->cmn.shader);
    // bind combined texture-samplers
    _SG_GL_CHECK_ERROR();
    for (size_t tex_smp_index = 0; tex_smp_index < SG_MAX_TEXTURE_SAMPLER_PAIRS; tex_smp_index++) {
        const _sg_shader_texture_sampler_t* tex_smp = &shd->cmn.texture_samplers[tex_smp_index];
        if (tex_smp->stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const int8_t gl_tex_slot = shd->gl.tex_slot[tex_smp_index];
        if (gl_tex_slot != -1) {
            SOKOL_ASSERT(tex_smp->view_slot < SG_MAX_VIEW_BINDSLOTS);
            SOKOL_ASSERT(tex_smp->sampler_slot < SG_MAX_SAMPLER_BINDSLOTS);
            const _sg_view_t* view = bnd->views[tex_smp->view_slot];
            const _sg_sampler_t* smp = bnd->smps[tex_smp->sampler_slot];
            SOKOL_ASSERT(view);
            SOKOL_ASSERT(smp);
            const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
            const GLenum gl_tgt = img->gl.target;
            const GLuint gl_smp = smp->gl.smp;
            GLuint gl_tex;
            if (_sg.features.gl_texture_views) {
                gl_tex = view->gl.tex_view[img->cmn.active_slot];
            } else {
                gl_tex = img->gl.tex[img->cmn.active_slot];
            }
            _sg_gl_cache_bind_texture_sampler(gl_tex_slot, gl_tgt, gl_tex, gl_smp);
        }
    }
    _SG_GL_CHECK_ERROR();
    // bind storage buffers and images
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const _sg_view_t* view = bnd->views[i];
        if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
            const _sg_buffer_t* sbuf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
            const uint8_t gl_binding = shd->gl.sbuf_binding[i];
            GLuint gl_sbuf = sbuf->gl.buf[sbuf->cmn.active_slot];
            _sg_gl_cache_bind_storage_buffer(gl_binding, gl_sbuf, view->cmn.buf.offset, sbuf->cmn.size);
        } else if (view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE) {
            #if defined(_SOKOL_GL_HAS_COMPUTE)
            const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
            const uint8_t gl_unit = shd->gl.simg_binding[i];
            SOKOL_ASSERT((int)gl_unit < _sg.limits.max_storage_image_bindings_per_stage);
            GLuint gl_tex = img->gl.tex[img->cmn.active_slot];
            GLint level = (GLint)view->cmn.img.mip_level;
            GLint layer = (GLint)view->cmn.img.slice;
            GLboolean layered = shd->cmn.views[i].image_type != SG_IMAGETYPE_2D;
            GLenum access = shd->cmn.views[i].simg_writeonly ? GL_WRITE_ONLY : GL_READ_WRITE;
            GLenum format = _sg_gl_teximage_internal_format(shd->cmn.views[i].access_format);
            // NOTE: we specifically don't go through the GL cache since storage images
            // are not supported on WebGL2, and on native platforms call caching isn't
            // worth the hassle
            glBindImageTexture(gl_unit, gl_tex, level, layered, layer, access, format);
            _sg_stats_inc(gl.num_bind_image_texture);
            #endif
        }
    }
    _SG_GL_CHECK_ERROR();
    if (!bnd->pip->cmn.is_compute) {
        // index buffer (can be 0)
        const GLuint gl_ib = bnd->ib ? bnd->ib->gl.buf[bnd->ib->cmn.active_slot] : 0;
        _sg_gl_cache_bind_buffer(GL_ELEMENT_ARRAY_BUFFER, gl_ib);
        _sg.gl.cache.cur_ib_offset = bnd->ib_offset;
        // vertex attributes
        for (GLuint attr_index = 0; attr_index < (GLuint)_sg.limits.max_vertex_attrs; attr_index++) {
            _sg_gl_attr_t* attr = &bnd->pip->gl.attrs[attr_index];
            _sg_gl_cache_attr_t* cache_attr = &_sg.gl.cache.attrs[attr_index];
            bool cache_attr_dirty = false;
            int vb_offset = 0;
            GLuint gl_vb = 0;
            if (attr->vb_index >= 0) {
                // attribute is enabled
                SOKOL_ASSERT(attr->vb_index < SG_MAX_VERTEXBUFFER_BINDSLOTS);
                _sg_buffer_t* vb = bnd->vbs[attr->vb_index];
                SOKOL_ASSERT(vb);
                gl_vb = vb->gl.buf[vb->cmn.active_slot];
                vb_offset = bnd->vb_offsets[attr->vb_index] + attr->offset;
                if ((gl_vb != cache_attr->gl_vbuf) ||
                    (attr->size != cache_attr->gl_attr.size) ||
                    (attr->type != cache_attr->gl_attr.type) ||
                    (attr->normalized != cache_attr->gl_attr.normalized) ||
                    (attr->base_type != cache_attr->gl_attr.base_type) ||
                    (attr->stride != cache_attr->gl_attr.stride) ||
                    (vb_offset != cache_attr->gl_attr.offset) ||
                    (cache_attr->gl_attr.divisor != attr->divisor))
                {
                    _sg_gl_cache_bind_buffer(GL_ARRAY_BUFFER, gl_vb);
                    if (attr->base_type == SG_SHADERATTRBASETYPE_FLOAT) {
                        glVertexAttribPointer(attr_index, attr->size, attr->type, attr->normalized, attr->stride, (const GLvoid*)(GLintptr)vb_offset);
                    } else {
                        glVertexAttribIPointer(attr_index, attr->size, attr->type, attr->stride, (const GLvoid*)(GLintptr)vb_offset);
                    }
                    _sg_stats_inc(gl.num_vertex_attrib_pointer);
                    glVertexAttribDivisor(attr_index, (GLuint)attr->divisor);
                    _sg_stats_inc(gl.num_vertex_attrib_divisor);
                    cache_attr_dirty = true;
                }
                if (cache_attr->gl_attr.vb_index == -1) {
                    glEnableVertexAttribArray(attr_index);
                    _sg_stats_inc(gl.num_enable_vertex_attrib_array);
                    cache_attr_dirty = true;
                }
            } else {
                // attribute is disabled
                if (cache_attr->gl_attr.vb_index != -1) {
                    glDisableVertexAttribArray(attr_index);
                    _sg_stats_inc(gl.num_disable_vertex_attrib_array);
                    cache_attr_dirty = true;
                }
            }
            if (cache_attr_dirty) {
                cache_attr->gl_attr = *attr;
                cache_attr->gl_attr.offset = vb_offset;
                cache_attr->gl_vbuf = gl_vb;
            }
        }
        _SG_GL_CHECK_ERROR();
    }
    // take care of storage resource memory barriers (this needs to happen after the bindings are set)
    #if defined(_SOKOL_GL_HAS_COMPUTE)
    _sg_gl_handle_memory_barriers(shd, bnd, 0);
    _SG_GL_CHECK_ERROR();
    #endif
    return true;
}
_SOKOL_PRIVATE void _sg_gl_apply_uniforms(int ub_slot, const sg_range* data) {
    SOKOL_ASSERT((ub_slot >= 0) && (ub_slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS));
    const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    SOKOL_ASSERT(SG_SHADERSTAGE_NONE != shd->cmn.uniform_blocks[ub_slot].stage);
    SOKOL_ASSERT(data->size == shd->cmn.uniform_blocks[ub_slot].size);
    const _sg_gl_uniform_block_t* gl_ub = &shd->gl.uniform_blocks[ub_slot];
    for (int u_index = 0; u_index < gl_ub->num_uniforms; u_index++) {
        const _sg_gl_uniform_t* u = &gl_ub->uniforms[u_index];
        SOKOL_ASSERT(u->type != SG_UNIFORMTYPE_INVALID);
        if (u->gl_loc == -1) {
            continue;
        }
        _sg_stats_inc(gl.num_uniform);
        GLfloat* fptr = (GLfloat*) (((uint8_t*)data->ptr) + u->offset);
        GLint* iptr = (GLint*) (((uint8_t*)data->ptr) + u->offset);
        switch (u->type) {
            case SG_UNIFORMTYPE_INVALID:
                break;
            case SG_UNIFORMTYPE_FLOAT:
                glUniform1fv(u->gl_loc, u->count, fptr);
                break;
            case SG_UNIFORMTYPE_FLOAT2:
                glUniform2fv(u->gl_loc, u->count, fptr);
                break;
            case SG_UNIFORMTYPE_FLOAT3:
                glUniform3fv(u->gl_loc, u->count, fptr);
                break;
            case SG_UNIFORMTYPE_FLOAT4:
                glUniform4fv(u->gl_loc, u->count, fptr);
                break;
            case SG_UNIFORMTYPE_INT:
                glUniform1iv(u->gl_loc, u->count, iptr);
                break;
            case SG_UNIFORMTYPE_INT2:
                glUniform2iv(u->gl_loc, u->count, iptr);
                break;
            case SG_UNIFORMTYPE_INT3:
                glUniform3iv(u->gl_loc, u->count, iptr);
                break;
            case SG_UNIFORMTYPE_INT4:
                glUniform4iv(u->gl_loc, u->count, iptr);
                break;
            case SG_UNIFORMTYPE_MAT4:
                glUniformMatrix4fv(u->gl_loc, u->count, GL_FALSE, fptr);
                break;
            default:
                SOKOL_UNREACHABLE;
                break;
        }
    }
}
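// Worked example for the uniform flattening above (a hypothetical uniform
// block with a mat4 at offset 0 and a float at offset 64, with locations
// loc_mvp/loc_scale): an sg_apply_uniforms() call with an 80-byte data blob
// turns into
//
//      glUniformMatrix4fv(loc_mvp, 1, GL_FALSE, (const GLfloat*)data->ptr);
//      glUniform1fv(loc_scale, 1, (const GLfloat*)((const uint8_t*)data->ptr + 64));
//
// ...i.e. on the GL backend a 'uniform block' is not a buffer object, it's a
// memory range scattered into individual glUniform*() calls.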
_SOKOL_PRIVATE void _sg_gl_draw(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
    const GLenum p_type = _sg.gl.cache.cur_primitive_type;
    const bool use_instanced_draw = (num_instances > 1) || _sg.use_instanced_draw;
    if (_sg.use_indexed_draw) {
        // indexed rendering
        const GLenum i_type = _sg.gl.cache.cur_index_type;
        const int i_size = (i_type == GL_UNSIGNED_SHORT) ? 2 : 4;
        const int ib_offset = _sg.gl.cache.cur_ib_offset;
        const GLvoid* indices = (const GLvoid*)(GLintptr)(base_element*i_size+ib_offset);
        if (use_instanced_draw) {
            if ((base_vertex == 0) && (base_instance == 0)) {
                glDrawElementsInstanced(p_type, num_elements, i_type, indices, num_instances);
            } else if ((base_vertex != 0) && (base_instance == 0) && _sg.features.draw_base_vertex) {
                #if defined(_SOKOL_GL_HAS_BASEVERTEX)
                glDrawElementsInstancedBaseVertex(p_type, num_elements, i_type, indices, num_instances, base_vertex);
                #endif
            } else if ((base_instance != 0) && _sg.features.draw_base_instance) {
                #if defined(_SOKOL_GL_HAS_BASEINSTANCE)
                glDrawElementsInstancedBaseVertexBaseInstance(p_type, num_elements, i_type, indices, num_instances, base_vertex, (GLuint)base_instance);
                #endif
            }
        } else {
            if (base_vertex == 0) {
                glDrawElements(p_type, num_elements, i_type, indices);
            } else if (_sg.features.draw_base_vertex) {
                #if defined(_SOKOL_GL_HAS_BASEVERTEX)
                glDrawElementsBaseVertex(p_type, num_elements, i_type, indices, base_vertex);
                #endif
            }
        }
    } else {
        // non-indexed rendering
        if (use_instanced_draw) {
            if (base_instance == 0) {
                glDrawArraysInstanced(p_type, base_element, num_elements, num_instances);
            } else if (_sg.features.draw_base_instance) {
                #if defined(_SOKOL_GL_HAS_BASEINSTANCE)
                glDrawArraysInstancedBaseInstance(p_type, base_element, num_elements, num_instances, (GLuint)base_instance);
                #endif
            }
        } else {
            glDrawArrays(p_type, base_element, num_elements);
        }
    }
}
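// Worked example for the index pointer computation above (hypothetical
// numbers): with GL_UNSIGNED_SHORT indices (i_size = 2), an index buffer
// bound with offset 12 and base_element = 100, the glDrawElements() indices
// 'pointer' becomes 100 * 2 + 12 = 212, i.e. a byte offset into the
// currently bound GL_ELEMENT_ARRAY_BUFFER.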
_SOKOL_PRIVATE void _sg_gl_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    #if defined(_SOKOL_GL_HAS_COMPUTE)
    if (!_sg.features.compute) {
        return;
    }
    glDispatchCompute((GLuint)num_groups_x, (GLuint)num_groups_y, (GLuint)num_groups_z);
    #else
    _SOKOL_UNUSED(num_groups_x); _SOKOL_UNUSED(num_groups_y); _SOKOL_UNUSED(num_groups_z);
    #endif
}
_SOKOL_PRIVATE void _sg_gl_commit(void) {
    // "soft" clear bindings (only those that are actually bound)
    _sg_gl_cache_clear_buffer_bindings(false);
    _sg_gl_cache_clear_texture_sampler_bindings(false);
}
_SOKOL_PRIVATE void _sg_gl_update_buffer(_sg_buffer_t* buf, const sg_range* data) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    // only one update per buffer per frame allowed
    if (++buf->cmn.active_slot >= buf->cmn.num_slots) {
        buf->cmn.active_slot = 0;
    }
    GLenum gl_tgt = _sg_gl_buffer_target(&buf->cmn.usage);
    SOKOL_ASSERT(buf->cmn.active_slot < SG_NUM_INFLIGHT_FRAMES);
    GLuint gl_buf = buf->gl.buf[buf->cmn.active_slot];
    SOKOL_ASSERT(gl_buf);
    _SG_GL_CHECK_ERROR();
    _sg_gl_cache_store_buffer_binding(gl_tgt);
    _sg_gl_cache_bind_buffer(gl_tgt, gl_buf);
    glBufferSubData(gl_tgt, 0, (GLsizeiptr)data->size, data->ptr);
    _sg_gl_cache_restore_buffer_binding(gl_tgt);
    _SG_GL_CHECK_ERROR();
}
_SOKOL_PRIVATE void _sg_gl_append_buffer(_sg_buffer_t* buf, const sg_range* data, bool new_frame) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    if (new_frame) {
        if (++buf->cmn.active_slot >= buf->cmn.num_slots) {
            buf->cmn.active_slot = 0;
        }
    }
    GLenum gl_tgt = _sg_gl_buffer_target(&buf->cmn.usage);
    SOKOL_ASSERT(buf->cmn.active_slot < SG_NUM_INFLIGHT_FRAMES);
    GLuint gl_buf = buf->gl.buf[buf->cmn.active_slot];
    SOKOL_ASSERT(gl_buf);
    _SG_GL_CHECK_ERROR();
    _sg_gl_cache_store_buffer_binding(gl_tgt);
    _sg_gl_cache_bind_buffer(gl_tgt, gl_buf);
    glBufferSubData(gl_tgt, buf->cmn.append_pos, (GLsizeiptr)data->size, data->ptr);
    _sg_gl_cache_restore_buffer_binding(gl_tgt);
    _SG_GL_CHECK_ERROR();
}
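// How the slot rotation above plays out over time (assuming
// SG_NUM_INFLIGHT_FRAMES == 2 and a dynamic buffer with num_slots == 2):
//
//      frame N:    active_slot 0 -> 1, the CPU writes GL buffer 1 while the
//                  GPU may still read GL buffer 0 from frame N-1
//      frame N+1:  active_slot 1 -> 0, the CPU writes GL buffer 0, etc...
//
// This is the reason for the one-update-per-buffer-per-frame rule: with only
// num_slots copies, a second rotation in the same frame would write into
// data the GPU hasn't consumed yet.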
_SOKOL_PRIVATE void _sg_gl_update_image(_sg_image_t* img, const sg_image_data* data) {
    SOKOL_ASSERT(img && data);
    // only one update per image per frame allowed
    if (++img->cmn.active_slot >= img->cmn.num_slots) {
        img->cmn.active_slot = 0;
    }
    SOKOL_ASSERT(img->cmn.active_slot < SG_NUM_INFLIGHT_FRAMES);
    SOKOL_ASSERT(0 != img->gl.tex[img->cmn.active_slot]);
    _sg_gl_cache_store_texture_sampler_binding(0);
    _sg_gl_cache_bind_texture_sampler(0, img->gl.target, img->gl.tex[img->cmn.active_slot], 0);
    const int num_mips = img->cmn.num_mipmaps;
    for (int mip_index = 0; mip_index < num_mips; mip_index++) {
        const GLvoid* data_ptr = data->mip_levels[mip_index].ptr;
        const GLsizei data_size = (GLsizei)data->mip_levels[mip_index].size;
        const int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
        const int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
        const int mip_depth = (SG_IMAGETYPE_3D == img->cmn.type) ? _sg_miplevel_dim(img->cmn.num_slices, mip_index) : img->cmn.num_slices;
        if (SG_IMAGETYPE_CUBE == img->cmn.type) {
            const int surf_pitch = _sg_surface_pitch(img->cmn.pixel_format, mip_width, mip_height, 1);
            SOKOL_ASSERT((6 * surf_pitch) <= data_size);
            const uint8_t* surf_ptr = (const uint8_t*) data_ptr;
            for (int i = 0; i < 6; i++) {
                const GLenum gl_img_target = _sg_gl_cubeface_target(i);
                _sg_gl_texsubimage(img, gl_img_target, mip_index, mip_width, mip_height, mip_depth, surf_ptr, surf_pitch);
                surf_ptr += surf_pitch;
            }
        } else {
            _sg_gl_texsubimage(img, img->gl.target, mip_index, mip_width, mip_height, mip_depth, data_ptr, data_size);
        }
    }
    _sg_gl_cache_restore_texture_sampler_binding(0);
}
// ██████ ██████ ██████ ██ ██ ██████ █████ ██████ ██ ██ ███████ ███ ██ ██████
// ██ ██ ██ ██ ██ ███ ███ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██
// ██ ██ █████ ██ ██ ██ ██ ██████ ███████ ██ █████ █████ ██ ██ ██ ██ ██
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ██████ ██████ ██████ ██ ██ ██████ ██ ██ ██████ ██ ██ ███████ ██ ████ ██████
//
// >>d3d11 backend
#elif defined(SOKOL_D3D11)
#if defined(__cplusplus)
#define _sg_d3d11_AddRef(self) (self)->AddRef()
#else
#define _sg_d3d11_AddRef(self) (self)->lpVtbl->AddRef(self)
#endif
#if defined(__cplusplus)
#define _sg_d3d11_Release(self) (self)->Release()
#else
#define _sg_d3d11_Release(self) (self)->lpVtbl->Release(self)
#endif
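// These wrappers exist because COM methods are C++ member functions, while C
// has to go through the explicit vtable pointer. A minimal sketch of the two
// spellings (self being any ID3D11 interface pointer):
//
//      self->AddRef();               // C++: implicit vtable and 'this'
//      self->lpVtbl->AddRef(self);   // C:   explicit vtable, explicit self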
// NOTE: This needs to be a macro since we can't use polymorphism in C. It's called on many kinds of resources.
// NOTE: Based on the Microsoft docs, it's fine to call this with pData=NULL if DataSize is also zero.
#if defined(__cplusplus)
#define _sg_d3d11_SetPrivateData(self, guid, DataSize, pData) (self)->SetPrivateData(guid, DataSize, pData)
#else
#define _sg_d3d11_SetPrivateData(self, guid, DataSize, pData) (self)->lpVtbl->SetPrivateData(self, guid, DataSize, pData)
#endif
#if defined(__cplusplus)
#define _sg_win32_refguid(guid) guid
#else
#define _sg_win32_refguid(guid) &guid
#endif
static const GUID _sg_d3d11_WKPDID_D3DDebugObjectName = { 0x429b8c22,0x9188,0x4b0c, {0x87,0x42,0xac,0xb0,0xbf,0x85,0xc2,0x00} };
#if defined(SOKOL_DEBUG)
#define _sg_d3d11_setlabel(self, label) _sg_d3d11_SetPrivateData(self, _sg_win32_refguid(_sg_d3d11_WKPDID_D3DDebugObjectName), label ? (UINT)strlen(label) : 0, label)
#else
#define _sg_d3d11_setlabel(self, label)
#endif
//-- D3D11 C/C++ wrappers ------------------------------------------------------
static inline HRESULT _sg_d3d11_CheckFormatSupport(ID3D11Device* self, DXGI_FORMAT Format, UINT* pFormatSupport) {
    #if defined(__cplusplus)
    return self->CheckFormatSupport(Format, pFormatSupport);
    #else
    return self->lpVtbl->CheckFormatSupport(self, Format, pFormatSupport);
    #endif
}
static inline void _sg_d3d11_OMSetRenderTargets(ID3D11DeviceContext* self, UINT NumViews, ID3D11RenderTargetView* const* ppRenderTargetViews, ID3D11DepthStencilView* pDepthStencilView) {
    #if defined(__cplusplus)
    self->OMSetRenderTargets(NumViews, ppRenderTargetViews, pDepthStencilView);
    #else
    self->lpVtbl->OMSetRenderTargets(self, NumViews, ppRenderTargetViews, pDepthStencilView);
    #endif
}
static inline void _sg_d3d11_RSSetState(ID3D11DeviceContext* self, ID3D11RasterizerState* pRasterizerState) {
    #if defined(__cplusplus)
    self->RSSetState(pRasterizerState);
    #else
    self->lpVtbl->RSSetState(self, pRasterizerState);
    #endif
}
static inline void _sg_d3d11_OMSetDepthStencilState(ID3D11DeviceContext* self, ID3D11DepthStencilState* pDepthStencilState, UINT StencilRef) {
    #if defined(__cplusplus)
    self->OMSetDepthStencilState(pDepthStencilState, StencilRef);
    #else
    self->lpVtbl->OMSetDepthStencilState(self, pDepthStencilState, StencilRef);
    #endif
}
static inline void _sg_d3d11_OMSetBlendState(ID3D11DeviceContext* self, ID3D11BlendState* pBlendState, const FLOAT BlendFactor[4], UINT SampleMask) {
    #if defined(__cplusplus)
    self->OMSetBlendState(pBlendState, BlendFactor, SampleMask);
    #else
    self->lpVtbl->OMSetBlendState(self, pBlendState, BlendFactor, SampleMask);
    #endif
}
static inline void _sg_d3d11_IASetVertexBuffers(ID3D11DeviceContext* self, UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppVertexBuffers, const UINT* pStrides, const UINT* pOffsets) {
    #if defined(__cplusplus)
    self->IASetVertexBuffers(StartSlot, NumBuffers, ppVertexBuffers, pStrides, pOffsets);
    #else
    self->lpVtbl->IASetVertexBuffers(self, StartSlot, NumBuffers, ppVertexBuffers, pStrides, pOffsets);
    #endif
}
static inline void _sg_d3d11_IASetIndexBuffer(ID3D11DeviceContext* self, ID3D11Buffer* pIndexBuffer, DXGI_FORMAT Format, UINT Offset) {
    #if defined(__cplusplus)
    self->IASetIndexBuffer(pIndexBuffer, Format, Offset);
    #else
    self->lpVtbl->IASetIndexBuffer(self, pIndexBuffer, Format, Offset);
    #endif
}
static inline void _sg_d3d11_IASetInputLayout(ID3D11DeviceContext* self, ID3D11InputLayout* pInputLayout) {
    #if defined(__cplusplus)
    self->IASetInputLayout(pInputLayout);
    #else
    self->lpVtbl->IASetInputLayout(self, pInputLayout);
    #endif
}
static inline void _sg_d3d11_VSSetShader(ID3D11DeviceContext* self, ID3D11VertexShader* pVertexShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) {
    #if defined(__cplusplus)
    self->VSSetShader(pVertexShader, ppClassInstances, NumClassInstances);
    #else
    self->lpVtbl->VSSetShader(self, pVertexShader, ppClassInstances, NumClassInstances);
    #endif
}
static inline void _sg_d3d11_PSSetShader(ID3D11DeviceContext* self, ID3D11PixelShader* pPixelShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) {
    #if defined(__cplusplus)
    self->PSSetShader(pPixelShader, ppClassInstances, NumClassInstances);
    #else
    self->lpVtbl->PSSetShader(self, pPixelShader, ppClassInstances, NumClassInstances);
    #endif
}
static inline void _sg_d3d11_CSSetShader(ID3D11DeviceContext* self, ID3D11ComputeShader* pComputeShader, ID3D11ClassInstance* const* ppClassInstances, UINT NumClassInstances) {
    #if defined(__cplusplus)
    self->CSSetShader(pComputeShader, ppClassInstances, NumClassInstances);
    #else
    self->lpVtbl->CSSetShader(self, pComputeShader, ppClassInstances, NumClassInstances);
    #endif
}
static inline void _sg_d3d11_VSSetConstantBuffers(ID3D11DeviceContext* self, UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) {
    #if defined(__cplusplus)
    self->VSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
    #else
    self->lpVtbl->VSSetConstantBuffers(self, StartSlot, NumBuffers, ppConstantBuffers);
    #endif
}
static inline void _sg_d3d11_PSSetConstantBuffers(ID3D11DeviceContext* self, UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) {
    #if defined(__cplusplus)
    self->PSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
    #else
    self->lpVtbl->PSSetConstantBuffers(self, StartSlot, NumBuffers, ppConstantBuffers);
    #endif
}
static inline void _sg_d3d11_CSSetConstantBuffers(ID3D11DeviceContext* self, UINT StartSlot, UINT NumBuffers, ID3D11Buffer* const* ppConstantBuffers) {
    #if defined(__cplusplus)
    self->CSSetConstantBuffers(StartSlot, NumBuffers, ppConstantBuffers);
    #else
    self->lpVtbl->CSSetConstantBuffers(self, StartSlot, NumBuffers, ppConstantBuffers);
    #endif
}
static inline void _sg_d3d11_VSSetShaderResources(ID3D11DeviceContext* self, UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) {
    #if defined(__cplusplus)
    self->VSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
    #else
    self->lpVtbl->VSSetShaderResources(self, StartSlot, NumViews, ppShaderResourceViews);
    #endif
}
static inline void _sg_d3d11_PSSetShaderResources(ID3D11DeviceContext* self, UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) {
    #if defined(__cplusplus)
    self->PSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
    #else
    self->lpVtbl->PSSetShaderResources(self, StartSlot, NumViews, ppShaderResourceViews);
    #endif
}
static inline void _sg_d3d11_CSSetShaderResources(ID3D11DeviceContext* self, UINT StartSlot, UINT NumViews, ID3D11ShaderResourceView* const* ppShaderResourceViews) {
    #if defined(__cplusplus)
    self->CSSetShaderResources(StartSlot, NumViews, ppShaderResourceViews);
    #else
    self->lpVtbl->CSSetShaderResources(self, StartSlot, NumViews, ppShaderResourceViews);
    #endif
}
static inline void _sg_d3d11_VSSetSamplers(ID3D11DeviceContext* self, UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) {
    #if defined(__cplusplus)
    self->VSSetSamplers(StartSlot, NumSamplers, ppSamplers);
    #else
    self->lpVtbl->VSSetSamplers(self, StartSlot, NumSamplers, ppSamplers);
    #endif
}
static inline void _sg_d3d11_PSSetSamplers(ID3D11DeviceContext* self, UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) {
    #if defined(__cplusplus)
    self->PSSetSamplers(StartSlot, NumSamplers, ppSamplers);
    #else
    self->lpVtbl->PSSetSamplers(self, StartSlot, NumSamplers, ppSamplers);
    #endif
}
static inline void _sg_d3d11_CSSetSamplers(ID3D11DeviceContext* self, UINT StartSlot, UINT NumSamplers, ID3D11SamplerState* const* ppSamplers) {
    #if defined(__cplusplus)
    self->CSSetSamplers(StartSlot, NumSamplers, ppSamplers);
    #else
    self->lpVtbl->CSSetSamplers(self, StartSlot, NumSamplers, ppSamplers);
    #endif
}
static inline void _sg_d3d11_CSSetUnorderedAccessViews(ID3D11DeviceContext* self, UINT StartSlot, UINT NumUAVs, ID3D11UnorderedAccessView* const* ppUnorderedAccessViews, const UINT* pUAVInitialCounts) {
    #if defined(__cplusplus)
    self->CSSetUnorderedAccessViews(StartSlot, NumUAVs, ppUnorderedAccessViews, pUAVInitialCounts);
    #else
    self->lpVtbl->CSSetUnorderedAccessViews(self, StartSlot, NumUAVs, ppUnorderedAccessViews, pUAVInitialCounts);
    #endif
  11020. }
  11021. static inline HRESULT _sg_d3d11_CreateBuffer(ID3D11Device* self, const D3D11_BUFFER_DESC* pDesc, const D3D11_SUBRESOURCE_DATA* pInitialData, ID3D11Buffer** ppBuffer) {
  11022. #if defined(__cplusplus)
  11023. return self->CreateBuffer(pDesc, pInitialData, ppBuffer);
  11024. #else
  11025. return self->lpVtbl->CreateBuffer(self, pDesc, pInitialData, ppBuffer);
  11026. #endif
  11027. }
  11028. static inline HRESULT _sg_d3d11_CreateTexture2D(ID3D11Device* self, const D3D11_TEXTURE2D_DESC* pDesc, const D3D11_SUBRESOURCE_DATA* pInitialData, ID3D11Texture2D** ppTexture2D) {
  11029. #if defined(__cplusplus)
  11030. return self->CreateTexture2D(pDesc, pInitialData, ppTexture2D);
  11031. #else
  11032. return self->lpVtbl->CreateTexture2D(self, pDesc, pInitialData, ppTexture2D);
  11033. #endif
  11034. }
  11035. static inline HRESULT _sg_d3d11_CreateShaderResourceView(ID3D11Device* self, ID3D11Resource* pResource, const D3D11_SHADER_RESOURCE_VIEW_DESC* pDesc, ID3D11ShaderResourceView** ppSRView) {
  11036. #if defined(__cplusplus)
  11037. return self->CreateShaderResourceView(pResource, pDesc, ppSRView);
  11038. #else
  11039. return self->lpVtbl->CreateShaderResourceView(self, pResource, pDesc, ppSRView);
  11040. #endif
  11041. }
  11042. static inline HRESULT _sg_d3d11_CreateUnorderedAccessView(ID3D11Device* self, ID3D11Resource* pResource, const D3D11_UNORDERED_ACCESS_VIEW_DESC* pDesc, ID3D11UnorderedAccessView** ppUAVView) {
  11043. #if defined(__cplusplus)
  11044. return self->CreateUnorderedAccessView(pResource, pDesc, ppUAVView);
  11045. #else
  11046. return self->lpVtbl->CreateUnorderedAccessView(self, pResource, pDesc, ppUAVView);
  11047. #endif
  11048. }
  11049. static inline void _sg_d3d11_GetResource(ID3D11View* self, ID3D11Resource** ppResource) {
  11050. #if defined(__cplusplus)
  11051. self->GetResource(ppResource);
  11052. #else
  11053. self->lpVtbl->GetResource(self, ppResource);
  11054. #endif
  11055. }
  11056. static inline HRESULT _sg_d3d11_CreateTexture3D(ID3D11Device* self, const D3D11_TEXTURE3D_DESC* pDesc, const D3D11_SUBRESOURCE_DATA* pInitialData, ID3D11Texture3D** ppTexture3D) {
  11057. #if defined(__cplusplus)
  11058. return self->CreateTexture3D(pDesc, pInitialData, ppTexture3D);
  11059. #else
  11060. return self->lpVtbl->CreateTexture3D(self, pDesc, pInitialData, ppTexture3D);
  11061. #endif
  11062. }
  11063. static inline HRESULT _sg_d3d11_CreateSamplerState(ID3D11Device* self, const D3D11_SAMPLER_DESC* pSamplerDesc, ID3D11SamplerState** ppSamplerState) {
  11064. #if defined(__cplusplus)
  11065. return self->CreateSamplerState(pSamplerDesc, ppSamplerState);
  11066. #else
  11067. return self->lpVtbl->CreateSamplerState(self, pSamplerDesc, ppSamplerState);
  11068. #endif
  11069. }
  11070. static inline LPVOID _sg_d3d11_GetBufferPointer(ID3D10Blob* self) {
  11071. #if defined(__cplusplus)
  11072. return self->GetBufferPointer();
  11073. #else
  11074. return self->lpVtbl->GetBufferPointer(self);
  11075. #endif
  11076. }
  11077. static inline SIZE_T _sg_d3d11_GetBufferSize(ID3D10Blob* self) {
  11078. #if defined(__cplusplus)
  11079. return self->GetBufferSize();
  11080. #else
  11081. return self->lpVtbl->GetBufferSize(self);
  11082. #endif
  11083. }
  11084. static inline HRESULT _sg_d3d11_CreateVertexShader(ID3D11Device* self, const void* pShaderBytecode, SIZE_T BytecodeLength, ID3D11ClassLinkage* pClassLinkage, ID3D11VertexShader** ppVertexShader) {
  11085. #if defined(__cplusplus)
  11086. return self->CreateVertexShader(pShaderBytecode, BytecodeLength, pClassLinkage, ppVertexShader);
  11087. #else
  11088. return self->lpVtbl->CreateVertexShader(self, pShaderBytecode, BytecodeLength, pClassLinkage, ppVertexShader);
  11089. #endif
  11090. }
  11091. static inline HRESULT _sg_d3d11_CreatePixelShader(ID3D11Device* self, const void* pShaderBytecode, SIZE_T BytecodeLength, ID3D11ClassLinkage* pClassLinkage, ID3D11PixelShader** ppPixelShader) {
  11092. #if defined(__cplusplus)
  11093. return self->CreatePixelShader(pShaderBytecode, BytecodeLength, pClassLinkage, ppPixelShader);
  11094. #else
  11095. return self->lpVtbl->CreatePixelShader(self, pShaderBytecode, BytecodeLength, pClassLinkage, ppPixelShader);
  11096. #endif
  11097. }
  11098. static inline HRESULT _sg_d3d11_CreateComputeShader(ID3D11Device* self, const void* pShaderBytecode, SIZE_T BytecodeLength, ID3D11ClassLinkage* pClassLinkage, ID3D11ComputeShader** ppComputeShader) {
  11099. #if defined(__cplusplus)
  11100. return self->CreateComputeShader(pShaderBytecode, BytecodeLength, pClassLinkage, ppComputeShader);
  11101. #else
  11102. return self->lpVtbl->CreateComputeShader(self, pShaderBytecode, BytecodeLength, pClassLinkage, ppComputeShader);
  11103. #endif
  11104. }
  11105. static inline HRESULT _sg_d3d11_CreateInputLayout(ID3D11Device* self, const D3D11_INPUT_ELEMENT_DESC* pInputElementDescs, UINT NumElements, const void* pShaderBytecodeWithInputSignature, SIZE_T BytecodeLength, ID3D11InputLayout **ppInputLayout) {
  11106. #if defined(__cplusplus)
  11107. return self->CreateInputLayout(pInputElementDescs, NumElements, pShaderBytecodeWithInputSignature, BytecodeLength, ppInputLayout);
  11108. #else
  11109. return self->lpVtbl->CreateInputLayout(self, pInputElementDescs, NumElements, pShaderBytecodeWithInputSignature, BytecodeLength, ppInputLayout);
  11110. #endif
  11111. }
  11112. static inline HRESULT _sg_d3d11_CreateRasterizerState(ID3D11Device* self, const D3D11_RASTERIZER_DESC* pRasterizerDesc, ID3D11RasterizerState** ppRasterizerState) {
  11113. #if defined(__cplusplus)
  11114. return self->CreateRasterizerState(pRasterizerDesc, ppRasterizerState);
  11115. #else
  11116. return self->lpVtbl->CreateRasterizerState(self, pRasterizerDesc, ppRasterizerState);
  11117. #endif
  11118. }
  11119. static inline HRESULT _sg_d3d11_CreateDepthStencilState(ID3D11Device* self, const D3D11_DEPTH_STENCIL_DESC* pDepthStencilDesc, ID3D11DepthStencilState** ppDepthStencilState) {
  11120. #if defined(__cplusplus)
  11121. return self->CreateDepthStencilState(pDepthStencilDesc, ppDepthStencilState);
  11122. #else
  11123. return self->lpVtbl->CreateDepthStencilState(self, pDepthStencilDesc, ppDepthStencilState);
  11124. #endif
  11125. }
  11126. static inline HRESULT _sg_d3d11_CreateBlendState(ID3D11Device* self, const D3D11_BLEND_DESC* pBlendStateDesc, ID3D11BlendState** ppBlendState) {
  11127. #if defined(__cplusplus)
  11128. return self->CreateBlendState(pBlendStateDesc, ppBlendState);
  11129. #else
  11130. return self->lpVtbl->CreateBlendState(self, pBlendStateDesc, ppBlendState);
  11131. #endif
  11132. }
  11133. static inline HRESULT _sg_d3d11_CreateRenderTargetView(ID3D11Device* self, ID3D11Resource *pResource, const D3D11_RENDER_TARGET_VIEW_DESC* pDesc, ID3D11RenderTargetView** ppRTView) {
  11134. #if defined(__cplusplus)
  11135. return self->CreateRenderTargetView(pResource, pDesc, ppRTView);
  11136. #else
  11137. return self->lpVtbl->CreateRenderTargetView(self, pResource, pDesc, ppRTView);
  11138. #endif
  11139. }
  11140. static inline HRESULT _sg_d3d11_CreateDepthStencilView(ID3D11Device* self, ID3D11Resource* pResource, const D3D11_DEPTH_STENCIL_VIEW_DESC* pDesc, ID3D11DepthStencilView** ppDepthStencilView) {
  11141. #if defined(__cplusplus)
  11142. return self->CreateDepthStencilView(pResource, pDesc, ppDepthStencilView);
  11143. #else
  11144. return self->lpVtbl->CreateDepthStencilView(self, pResource, pDesc, ppDepthStencilView);
  11145. #endif
  11146. }
  11147. static inline void _sg_d3d11_RSSetViewports(ID3D11DeviceContext* self, UINT NumViewports, const D3D11_VIEWPORT* pViewports) {
  11148. #if defined(__cplusplus)
  11149. self->RSSetViewports(NumViewports, pViewports);
  11150. #else
  11151. self->lpVtbl->RSSetViewports(self, NumViewports, pViewports);
  11152. #endif
  11153. }
  11154. static inline void _sg_d3d11_RSSetScissorRects(ID3D11DeviceContext* self, UINT NumRects, const D3D11_RECT* pRects) {
  11155. #if defined(__cplusplus)
  11156. self->RSSetScissorRects(NumRects, pRects);
  11157. #else
  11158. self->lpVtbl->RSSetScissorRects(self, NumRects, pRects);
  11159. #endif
  11160. }
  11161. static inline void _sg_d3d11_ClearRenderTargetView(ID3D11DeviceContext* self, ID3D11RenderTargetView* pRenderTargetView, const FLOAT ColorRGBA[4]) {
  11162. #if defined(__cplusplus)
  11163. self->ClearRenderTargetView(pRenderTargetView, ColorRGBA);
  11164. #else
  11165. self->lpVtbl->ClearRenderTargetView(self, pRenderTargetView, ColorRGBA);
  11166. #endif
  11167. }
  11168. static inline void _sg_d3d11_ClearDepthStencilView(ID3D11DeviceContext* self, ID3D11DepthStencilView* pDepthStencilView, UINT ClearFlags, FLOAT Depth, UINT8 Stencil) {
  11169. #if defined(__cplusplus)
  11170. self->ClearDepthStencilView(pDepthStencilView, ClearFlags, Depth, Stencil);
  11171. #else
  11172. self->lpVtbl->ClearDepthStencilView(self, pDepthStencilView, ClearFlags, Depth, Stencil);
  11173. #endif
  11174. }
  11175. static inline void _sg_d3d11_ResolveSubresource(ID3D11DeviceContext* self, ID3D11Resource* pDstResource, UINT DstSubresource, ID3D11Resource* pSrcResource, UINT SrcSubresource, DXGI_FORMAT Format) {
  11176. #if defined(__cplusplus)
  11177. self->ResolveSubresource(pDstResource, DstSubresource, pSrcResource, SrcSubresource, Format);
  11178. #else
  11179. self->lpVtbl->ResolveSubresource(self, pDstResource, DstSubresource, pSrcResource, SrcSubresource, Format);
  11180. #endif
  11181. }
  11182. static inline void _sg_d3d11_IASetPrimitiveTopology(ID3D11DeviceContext* self, D3D11_PRIMITIVE_TOPOLOGY Topology) {
  11183. #if defined(__cplusplus)
  11184. self->IASetPrimitiveTopology(Topology);
  11185. #else
  11186. self->lpVtbl->IASetPrimitiveTopology(self, Topology);
  11187. #endif
  11188. }
  11189. static inline void _sg_d3d11_UpdateSubresource(ID3D11DeviceContext* self, ID3D11Resource* pDstResource, UINT DstSubresource, const D3D11_BOX* pDstBox, const void* pSrcData, UINT SrcRowPitch, UINT SrcDepthPitch) {
  11190. #if defined(__cplusplus)
  11191. self->UpdateSubresource(pDstResource, DstSubresource, pDstBox, pSrcData, SrcRowPitch, SrcDepthPitch);
  11192. #else
  11193. self->lpVtbl->UpdateSubresource(self, pDstResource, DstSubresource, pDstBox, pSrcData, SrcRowPitch, SrcDepthPitch);
  11194. #endif
  11195. }
  11196. static inline void _sg_d3d11_DrawIndexed(ID3D11DeviceContext* self, UINT IndexCount, UINT StartIndexLocation, INT BaseVertexLocation) {
  11197. #if defined(__cplusplus)
  11198. self->DrawIndexed(IndexCount, StartIndexLocation, BaseVertexLocation);
  11199. #else
  11200. self->lpVtbl->DrawIndexed(self, IndexCount, StartIndexLocation, BaseVertexLocation);
  11201. #endif
  11202. }
  11203. static inline void _sg_d3d11_DrawIndexedInstanced(ID3D11DeviceContext* self, UINT IndexCountPerInstance, UINT InstanceCount, UINT StartIndexLocation, INT BaseVertexLocation, UINT StartInstanceLocation) {
  11204. #if defined(__cplusplus)
  11205. self->DrawIndexedInstanced(IndexCountPerInstance, InstanceCount, StartIndexLocation, BaseVertexLocation, StartInstanceLocation);
  11206. #else
  11207. self->lpVtbl->DrawIndexedInstanced(self, IndexCountPerInstance, InstanceCount, StartIndexLocation, BaseVertexLocation, StartInstanceLocation);
  11208. #endif
  11209. }
  11210. static inline void _sg_d3d11_Draw(ID3D11DeviceContext* self, UINT VertexCount, UINT StartVertexLocation) {
  11211. #if defined(__cplusplus)
  11212. self->Draw(VertexCount, StartVertexLocation);
  11213. #else
  11214. self->lpVtbl->Draw(self, VertexCount, StartVertexLocation);
  11215. #endif
  11216. }
  11217. static inline void _sg_d3d11_DrawInstanced(ID3D11DeviceContext* self, UINT VertexCountPerInstance, UINT InstanceCount, UINT StartVertexLocation, UINT StartInstanceLocation) {
  11218. #if defined(__cplusplus)
  11219. self->DrawInstanced(VertexCountPerInstance, InstanceCount, StartVertexLocation, StartInstanceLocation);
  11220. #else
  11221. self->lpVtbl->DrawInstanced(self, VertexCountPerInstance, InstanceCount, StartVertexLocation, StartInstanceLocation);
  11222. #endif
  11223. }
  11224. static inline void _sg_d3d11_Dispatch(ID3D11DeviceContext* self, UINT ThreadGroupCountX, UINT ThreadGroupCountY, UINT ThreadGroupCountZ) {
  11225. #if defined(__cplusplus)
  11226. self->Dispatch(ThreadGroupCountX, ThreadGroupCountY, ThreadGroupCountZ);
  11227. #else
  11228. self->lpVtbl->Dispatch(self, ThreadGroupCountX, ThreadGroupCountY, ThreadGroupCountZ);
  11229. #endif
  11230. }
  11231. static inline HRESULT _sg_d3d11_Map(ID3D11DeviceContext* self, ID3D11Resource* pResource, UINT Subresource, D3D11_MAP MapType, UINT MapFlags, D3D11_MAPPED_SUBRESOURCE* pMappedResource) {
  11232. #if defined(__cplusplus)
  11233. return self->Map(pResource, Subresource, MapType, MapFlags, pMappedResource);
  11234. #else
  11235. return self->lpVtbl->Map(self, pResource, Subresource, MapType, MapFlags, pMappedResource);
  11236. #endif
  11237. }
  11238. static inline void _sg_d3d11_Unmap(ID3D11DeviceContext* self, ID3D11Resource* pResource, UINT Subresource) {
  11239. #if defined(__cplusplus)
  11240. self->Unmap(pResource, Subresource);
  11241. #else
  11242. self->lpVtbl->Unmap(self, pResource, Subresource);
  11243. #endif
  11244. }
  11245. static inline void _sg_d3d11_ClearState(ID3D11DeviceContext* self) {
  11246. #if defined(__cplusplus)
  11247. self->ClearState();
  11248. #else
  11249. self->lpVtbl->ClearState(self);
  11250. #endif
  11251. }
  11252. static inline D3D_FEATURE_LEVEL _sg_d3d11_GetFeatureLevel(ID3D11Device* self) {
  11253. #if defined(__cplusplus)
  11254. return self->GetFeatureLevel();
  11255. #else
  11256. return self->lpVtbl->GetFeatureLevel(self);
  11257. #endif
  11258. }
//-- enum translation functions ------------------------------------------------
_SOKOL_PRIVATE D3D11_USAGE _sg_d3d11_image_usage(const sg_image_usage* usg) {
    if (usg->immutable) {
        if (usg->color_attachment ||
            usg->resolve_attachment ||
            usg->depth_stencil_attachment ||
            usg->storage_image)
        {
            return D3D11_USAGE_DEFAULT;
        } else {
            return D3D11_USAGE_IMMUTABLE;
        }
    } else {
        return D3D11_USAGE_DYNAMIC;
    }
}
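// Illustrative sanity check for the usage mapping above (not part of the
// implementation): any attachment or storage usage forces D3D11_USAGE_DEFAULT
// (the GPU must be able to write the image), a plain immutable sampled image
// becomes D3D11_USAGE_IMMUTABLE, and everything else is D3D11_USAGE_DYNAMIC
// so the CPU can update it:
//
//      sg_image_usage render_usg = { .immutable = true, .color_attachment = true };
//      // => _sg_d3d11_image_usage(&render_usg) == D3D11_USAGE_DEFAULT
//      sg_image_usage texture_usg = { .immutable = true };
//      // => _sg_d3d11_image_usage(&texture_usg) == D3D11_USAGE_IMMUTABLE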
_SOKOL_PRIVATE UINT _sg_d3d11_image_bind_flags(const sg_image_usage* usg) {
    UINT res = D3D11_BIND_SHADER_RESOURCE;
    if (usg->color_attachment) {
        res |= D3D11_BIND_RENDER_TARGET;
    }
    if (usg->depth_stencil_attachment) {
        res |= D3D11_BIND_DEPTH_STENCIL;
    }
    if (usg->storage_image) {
        res |= D3D11_BIND_UNORDERED_ACCESS;
    }
    return res;
}
_SOKOL_PRIVATE UINT _sg_d3d11_image_cpu_access_flags(const sg_image_usage* usg) {
    if (usg->color_attachment ||
        usg->resolve_attachment ||
        usg->depth_stencil_attachment ||
        usg->storage_image ||
        usg->immutable)
    {
        return 0;
    } else {
        return D3D11_CPU_ACCESS_WRITE;
    }
}
_SOKOL_PRIVATE D3D11_USAGE _sg_d3d11_buffer_usage(const sg_buffer_usage* usg) {
    if (usg->immutable) {
        return usg->storage_buffer ? D3D11_USAGE_DEFAULT : D3D11_USAGE_IMMUTABLE;
    } else {
        return D3D11_USAGE_DYNAMIC;
    }
}
_SOKOL_PRIVATE UINT _sg_d3d11_buffer_bind_flags(const sg_buffer_usage* usg) {
    UINT res = 0;
    if (usg->vertex_buffer) {
        res |= D3D11_BIND_VERTEX_BUFFER;
    }
    if (usg->index_buffer) {
        res |= D3D11_BIND_INDEX_BUFFER;
    }
    if (usg->storage_buffer) {
        res |= D3D11_BIND_SHADER_RESOURCE;
        if (usg->immutable) {
            res |= D3D11_BIND_UNORDERED_ACCESS;
        }
    }
    return res;
}
_SOKOL_PRIVATE UINT _sg_d3d11_buffer_misc_flags(const sg_buffer_usage* usg) {
    return usg->storage_buffer ? D3D11_RESOURCE_MISC_BUFFER_ALLOW_RAW_VIEWS : 0;
}
_SOKOL_PRIVATE UINT _sg_d3d11_buffer_cpu_access_flags(const sg_buffer_usage* usg) {
    return usg->immutable ? 0 : D3D11_CPU_ACCESS_WRITE;
}
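// Taken together, the buffer helpers above produce the following
// D3D11_BUFFER_DESC combinations (illustrative, derived directly from the
// code above):
//
//      immutable storage buffer: Usage=DEFAULT, BindFlags=SHADER_RESOURCE|UNORDERED_ACCESS,
//                                CPUAccessFlags=0, MiscFlags=BUFFER_ALLOW_RAW_VIEWS
//      dynamic vertex buffer:    Usage=DYNAMIC, BindFlags=VERTEX_BUFFER,
//                                CPUAccessFlags=WRITE, MiscFlags=0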
_SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_texture_pixel_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_R8: return DXGI_FORMAT_R8_UNORM;
        case SG_PIXELFORMAT_R8SN: return DXGI_FORMAT_R8_SNORM;
        case SG_PIXELFORMAT_R8UI: return DXGI_FORMAT_R8_UINT;
        case SG_PIXELFORMAT_R8SI: return DXGI_FORMAT_R8_SINT;
        case SG_PIXELFORMAT_R16: return DXGI_FORMAT_R16_UNORM;
        case SG_PIXELFORMAT_R16SN: return DXGI_FORMAT_R16_SNORM;
        case SG_PIXELFORMAT_R16UI: return DXGI_FORMAT_R16_UINT;
        case SG_PIXELFORMAT_R16SI: return DXGI_FORMAT_R16_SINT;
        case SG_PIXELFORMAT_R16F: return DXGI_FORMAT_R16_FLOAT;
        case SG_PIXELFORMAT_RG8: return DXGI_FORMAT_R8G8_UNORM;
        case SG_PIXELFORMAT_RG8SN: return DXGI_FORMAT_R8G8_SNORM;
        case SG_PIXELFORMAT_RG8UI: return DXGI_FORMAT_R8G8_UINT;
        case SG_PIXELFORMAT_RG8SI: return DXGI_FORMAT_R8G8_SINT;
        case SG_PIXELFORMAT_R32UI: return DXGI_FORMAT_R32_UINT;
        case SG_PIXELFORMAT_R32SI: return DXGI_FORMAT_R32_SINT;
        case SG_PIXELFORMAT_R32F: return DXGI_FORMAT_R32_FLOAT;
        case SG_PIXELFORMAT_RG16: return DXGI_FORMAT_R16G16_UNORM;
        case SG_PIXELFORMAT_RG16SN: return DXGI_FORMAT_R16G16_SNORM;
        case SG_PIXELFORMAT_RG16UI: return DXGI_FORMAT_R16G16_UINT;
        case SG_PIXELFORMAT_RG16SI: return DXGI_FORMAT_R16G16_SINT;
        case SG_PIXELFORMAT_RG16F: return DXGI_FORMAT_R16G16_FLOAT;
        case SG_PIXELFORMAT_RGBA8: return DXGI_FORMAT_R8G8B8A8_UNORM;
        case SG_PIXELFORMAT_SRGB8A8: return DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
        case SG_PIXELFORMAT_RGBA8SN: return DXGI_FORMAT_R8G8B8A8_SNORM;
        case SG_PIXELFORMAT_RGBA8UI: return DXGI_FORMAT_R8G8B8A8_UINT;
        case SG_PIXELFORMAT_RGBA8SI: return DXGI_FORMAT_R8G8B8A8_SINT;
        case SG_PIXELFORMAT_BGRA8: return DXGI_FORMAT_B8G8R8A8_UNORM;
        case SG_PIXELFORMAT_RGB10A2: return DXGI_FORMAT_R10G10B10A2_UNORM;
        case SG_PIXELFORMAT_RG11B10F: return DXGI_FORMAT_R11G11B10_FLOAT;
        case SG_PIXELFORMAT_RGB9E5: return DXGI_FORMAT_R9G9B9E5_SHAREDEXP;
        case SG_PIXELFORMAT_RG32UI: return DXGI_FORMAT_R32G32_UINT;
        case SG_PIXELFORMAT_RG32SI: return DXGI_FORMAT_R32G32_SINT;
        case SG_PIXELFORMAT_RG32F: return DXGI_FORMAT_R32G32_FLOAT;
        case SG_PIXELFORMAT_RGBA16: return DXGI_FORMAT_R16G16B16A16_UNORM;
        case SG_PIXELFORMAT_RGBA16SN: return DXGI_FORMAT_R16G16B16A16_SNORM;
        case SG_PIXELFORMAT_RGBA16UI: return DXGI_FORMAT_R16G16B16A16_UINT;
        case SG_PIXELFORMAT_RGBA16SI: return DXGI_FORMAT_R16G16B16A16_SINT;
        case SG_PIXELFORMAT_RGBA16F: return DXGI_FORMAT_R16G16B16A16_FLOAT;
        case SG_PIXELFORMAT_RGBA32UI: return DXGI_FORMAT_R32G32B32A32_UINT;
        case SG_PIXELFORMAT_RGBA32SI: return DXGI_FORMAT_R32G32B32A32_SINT;
        case SG_PIXELFORMAT_RGBA32F: return DXGI_FORMAT_R32G32B32A32_FLOAT;
        case SG_PIXELFORMAT_DEPTH: return DXGI_FORMAT_R32_TYPELESS;
        case SG_PIXELFORMAT_DEPTH_STENCIL: return DXGI_FORMAT_R24G8_TYPELESS;
        case SG_PIXELFORMAT_BC1_RGBA: return DXGI_FORMAT_BC1_UNORM;
        case SG_PIXELFORMAT_BC2_RGBA: return DXGI_FORMAT_BC2_UNORM;
        case SG_PIXELFORMAT_BC3_RGBA: return DXGI_FORMAT_BC3_UNORM;
        case SG_PIXELFORMAT_BC3_SRGBA: return DXGI_FORMAT_BC3_UNORM_SRGB;
        case SG_PIXELFORMAT_BC4_R: return DXGI_FORMAT_BC4_UNORM;
        case SG_PIXELFORMAT_BC4_RSN: return DXGI_FORMAT_BC4_SNORM;
        case SG_PIXELFORMAT_BC5_RG: return DXGI_FORMAT_BC5_UNORM;
        case SG_PIXELFORMAT_BC5_RGSN: return DXGI_FORMAT_BC5_SNORM;
        case SG_PIXELFORMAT_BC6H_RGBF: return DXGI_FORMAT_BC6H_SF16;
        case SG_PIXELFORMAT_BC6H_RGBUF: return DXGI_FORMAT_BC6H_UF16;
        case SG_PIXELFORMAT_BC7_RGBA: return DXGI_FORMAT_BC7_UNORM;
        case SG_PIXELFORMAT_BC7_SRGBA: return DXGI_FORMAT_BC7_UNORM_SRGB;
        default: return DXGI_FORMAT_UNKNOWN;
    }
}
_SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_srv_pixel_format(sg_pixel_format fmt) {
    if (fmt == SG_PIXELFORMAT_DEPTH) {
        return DXGI_FORMAT_R32_FLOAT;
    } else if (fmt == SG_PIXELFORMAT_DEPTH_STENCIL) {
        return DXGI_FORMAT_R24_UNORM_X8_TYPELESS;
    } else {
        return _sg_d3d11_texture_pixel_format(fmt);
    }
}
_SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_dsv_pixel_format(sg_pixel_format fmt) {
    if (fmt == SG_PIXELFORMAT_DEPTH) {
        return DXGI_FORMAT_D32_FLOAT;
    } else if (fmt == SG_PIXELFORMAT_DEPTH_STENCIL) {
        return DXGI_FORMAT_D24_UNORM_S8_UINT;
    } else {
        return _sg_d3d11_texture_pixel_format(fmt);
    }
}
_SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_rtv_uav_pixel_format(sg_pixel_format fmt) {
    if (fmt == SG_PIXELFORMAT_DEPTH) {
        return DXGI_FORMAT_R32_FLOAT;
    } else if (fmt == SG_PIXELFORMAT_DEPTH_STENCIL) {
        return DXGI_FORMAT_R24_UNORM_X8_TYPELESS;
    } else {
        return _sg_d3d11_texture_pixel_format(fmt);
    }
}
_SOKOL_PRIVATE D3D11_PRIMITIVE_TOPOLOGY _sg_d3d11_primitive_topology(sg_primitive_type prim_type) {
    switch (prim_type) {
        case SG_PRIMITIVETYPE_POINTS: return D3D11_PRIMITIVE_TOPOLOGY_POINTLIST;
        case SG_PRIMITIVETYPE_LINES: return D3D11_PRIMITIVE_TOPOLOGY_LINELIST;
        case SG_PRIMITIVETYPE_LINE_STRIP: return D3D11_PRIMITIVE_TOPOLOGY_LINESTRIP;
        case SG_PRIMITIVETYPE_TRIANGLES: return D3D11_PRIMITIVE_TOPOLOGY_TRIANGLELIST;
        case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP;
        default: SOKOL_UNREACHABLE; return (D3D11_PRIMITIVE_TOPOLOGY) 0;
    }
}
_SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_index_format(sg_index_type index_type) {
    switch (index_type) {
        case SG_INDEXTYPE_NONE: return DXGI_FORMAT_UNKNOWN;
        case SG_INDEXTYPE_UINT16: return DXGI_FORMAT_R16_UINT;
        case SG_INDEXTYPE_UINT32: return DXGI_FORMAT_R32_UINT;
        default: SOKOL_UNREACHABLE; return (DXGI_FORMAT) 0;
    }
}
_SOKOL_PRIVATE D3D11_FILTER _sg_d3d11_filter(sg_filter min_f, sg_filter mag_f, sg_filter mipmap_f, bool comparison, uint32_t max_anisotropy) {
    uint32_t d3d11_filter = 0;
    if (max_anisotropy > 1) {
        // D3D11_FILTER_ANISOTROPIC = 0x55,
        d3d11_filter |= 0x55;
    } else {
        // D3D11_FILTER_MIN_MAG_MIP_POINT = 0,
        // D3D11_FILTER_MIN_MAG_POINT_MIP_LINEAR = 0x1,
        // D3D11_FILTER_MIN_POINT_MAG_LINEAR_MIP_POINT = 0x4,
        // D3D11_FILTER_MIN_POINT_MAG_MIP_LINEAR = 0x5,
        // D3D11_FILTER_MIN_LINEAR_MAG_MIP_POINT = 0x10,
        // D3D11_FILTER_MIN_LINEAR_MAG_POINT_MIP_LINEAR = 0x11,
        // D3D11_FILTER_MIN_MAG_LINEAR_MIP_POINT = 0x14,
        // D3D11_FILTER_MIN_MAG_MIP_LINEAR = 0x15,
        if (mipmap_f == SG_FILTER_LINEAR) {
            d3d11_filter |= 0x01;
        }
        if (mag_f == SG_FILTER_LINEAR) {
            d3d11_filter |= 0x04;
        }
        if (min_f == SG_FILTER_LINEAR) {
            d3d11_filter |= 0x10;
        }
    }
    // D3D11_FILTER_COMPARISON_MIN_MAG_MIP_POINT = 0x80,
    // D3D11_FILTER_COMPARISON_MIN_MAG_POINT_MIP_LINEAR = 0x81,
    // D3D11_FILTER_COMPARISON_MIN_POINT_MAG_LINEAR_MIP_POINT = 0x84,
    // D3D11_FILTER_COMPARISON_MIN_POINT_MAG_MIP_LINEAR = 0x85,
    // D3D11_FILTER_COMPARISON_MIN_LINEAR_MAG_MIP_POINT = 0x90,
    // D3D11_FILTER_COMPARISON_MIN_LINEAR_MAG_POINT_MIP_LINEAR = 0x91,
    // D3D11_FILTER_COMPARISON_MIN_MAG_LINEAR_MIP_POINT = 0x94,
    // D3D11_FILTER_COMPARISON_MIN_MAG_MIP_LINEAR = 0x95,
    // D3D11_FILTER_COMPARISON_ANISOTROPIC = 0xd5,
    if (comparison) {
        d3d11_filter |= 0x80;
    }
    return (D3D11_FILTER)d3d11_filter;
}
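// Worked example for the bit-twiddling above: min=LINEAR (0x10), mag=NEAREST,
// mip=LINEAR (0x01), no comparison and no anisotropy yields
// 0x10 | 0x01 == 0x11 == D3D11_FILTER_MIN_LINEAR_MAG_POINT_MIP_LINEAR;
// the same combination with comparison enabled additionally sets the 0x80 bit,
// yielding 0x91 (D3D11_FILTER_COMPARISON_MIN_LINEAR_MAG_POINT_MIP_LINEAR):
//
//      D3D11_FILTER f = _sg_d3d11_filter(SG_FILTER_LINEAR, SG_FILTER_NEAREST, SG_FILTER_LINEAR, false, 1);
//      // => f == 0x11 == D3D11_FILTER_MIN_LINEAR_MAG_POINT_MIP_LINEAR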
_SOKOL_PRIVATE D3D11_TEXTURE_ADDRESS_MODE _sg_d3d11_address_mode(sg_wrap m) {
    switch (m) {
        case SG_WRAP_REPEAT: return D3D11_TEXTURE_ADDRESS_WRAP;
        case SG_WRAP_CLAMP_TO_EDGE: return D3D11_TEXTURE_ADDRESS_CLAMP;
        case SG_WRAP_CLAMP_TO_BORDER: return D3D11_TEXTURE_ADDRESS_BORDER;
        case SG_WRAP_MIRRORED_REPEAT: return D3D11_TEXTURE_ADDRESS_MIRROR;
        default: SOKOL_UNREACHABLE; return (D3D11_TEXTURE_ADDRESS_MODE) 0;
    }
}
_SOKOL_PRIVATE DXGI_FORMAT _sg_d3d11_vertex_format(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT: return DXGI_FORMAT_R32_FLOAT;
        case SG_VERTEXFORMAT_FLOAT2: return DXGI_FORMAT_R32G32_FLOAT;
        case SG_VERTEXFORMAT_FLOAT3: return DXGI_FORMAT_R32G32B32_FLOAT;
        case SG_VERTEXFORMAT_FLOAT4: return DXGI_FORMAT_R32G32B32A32_FLOAT;
        case SG_VERTEXFORMAT_INT: return DXGI_FORMAT_R32_SINT;
        case SG_VERTEXFORMAT_INT2: return DXGI_FORMAT_R32G32_SINT;
        case SG_VERTEXFORMAT_INT3: return DXGI_FORMAT_R32G32B32_SINT;
        case SG_VERTEXFORMAT_INT4: return DXGI_FORMAT_R32G32B32A32_SINT;
        case SG_VERTEXFORMAT_UINT: return DXGI_FORMAT_R32_UINT;
        case SG_VERTEXFORMAT_UINT2: return DXGI_FORMAT_R32G32_UINT;
        case SG_VERTEXFORMAT_UINT3: return DXGI_FORMAT_R32G32B32_UINT;
        case SG_VERTEXFORMAT_UINT4: return DXGI_FORMAT_R32G32B32A32_UINT;
        case SG_VERTEXFORMAT_BYTE4: return DXGI_FORMAT_R8G8B8A8_SINT;
        case SG_VERTEXFORMAT_BYTE4N: return DXGI_FORMAT_R8G8B8A8_SNORM;
        case SG_VERTEXFORMAT_UBYTE4: return DXGI_FORMAT_R8G8B8A8_UINT;
        case SG_VERTEXFORMAT_UBYTE4N: return DXGI_FORMAT_R8G8B8A8_UNORM;
        case SG_VERTEXFORMAT_SHORT2: return DXGI_FORMAT_R16G16_SINT;
        case SG_VERTEXFORMAT_SHORT2N: return DXGI_FORMAT_R16G16_SNORM;
        case SG_VERTEXFORMAT_USHORT2: return DXGI_FORMAT_R16G16_UINT;
        case SG_VERTEXFORMAT_USHORT2N: return DXGI_FORMAT_R16G16_UNORM;
        case SG_VERTEXFORMAT_SHORT4: return DXGI_FORMAT_R16G16B16A16_SINT;
        case SG_VERTEXFORMAT_SHORT4N: return DXGI_FORMAT_R16G16B16A16_SNORM;
        case SG_VERTEXFORMAT_USHORT4: return DXGI_FORMAT_R16G16B16A16_UINT;
        case SG_VERTEXFORMAT_USHORT4N: return DXGI_FORMAT_R16G16B16A16_UNORM;
        case SG_VERTEXFORMAT_UINT10_N2: return DXGI_FORMAT_R10G10B10A2_UNORM;
        case SG_VERTEXFORMAT_HALF2: return DXGI_FORMAT_R16G16_FLOAT;
        case SG_VERTEXFORMAT_HALF4: return DXGI_FORMAT_R16G16B16A16_FLOAT;
        default: SOKOL_UNREACHABLE; return (DXGI_FORMAT) 0;
    }
}
_SOKOL_PRIVATE D3D11_INPUT_CLASSIFICATION _sg_d3d11_input_classification(sg_vertex_step step) {
    switch (step) {
        case SG_VERTEXSTEP_PER_VERTEX: return D3D11_INPUT_PER_VERTEX_DATA;
        case SG_VERTEXSTEP_PER_INSTANCE: return D3D11_INPUT_PER_INSTANCE_DATA;
        default: SOKOL_UNREACHABLE; return (D3D11_INPUT_CLASSIFICATION) 0;
    }
}
_SOKOL_PRIVATE D3D11_CULL_MODE _sg_d3d11_cull_mode(sg_cull_mode m) {
    switch (m) {
        case SG_CULLMODE_NONE: return D3D11_CULL_NONE;
        case SG_CULLMODE_FRONT: return D3D11_CULL_FRONT;
        case SG_CULLMODE_BACK: return D3D11_CULL_BACK;
        default: SOKOL_UNREACHABLE; return (D3D11_CULL_MODE) 0;
    }
}
_SOKOL_PRIVATE D3D11_COMPARISON_FUNC _sg_d3d11_compare_func(sg_compare_func f) {
    switch (f) {
        case SG_COMPAREFUNC_NEVER: return D3D11_COMPARISON_NEVER;
        case SG_COMPAREFUNC_LESS: return D3D11_COMPARISON_LESS;
        case SG_COMPAREFUNC_EQUAL: return D3D11_COMPARISON_EQUAL;
        case SG_COMPAREFUNC_LESS_EQUAL: return D3D11_COMPARISON_LESS_EQUAL;
        case SG_COMPAREFUNC_GREATER: return D3D11_COMPARISON_GREATER;
        case SG_COMPAREFUNC_NOT_EQUAL: return D3D11_COMPARISON_NOT_EQUAL;
        case SG_COMPAREFUNC_GREATER_EQUAL: return D3D11_COMPARISON_GREATER_EQUAL;
        case SG_COMPAREFUNC_ALWAYS: return D3D11_COMPARISON_ALWAYS;
        default: SOKOL_UNREACHABLE; return (D3D11_COMPARISON_FUNC) 0;
    }
}
_SOKOL_PRIVATE D3D11_STENCIL_OP _sg_d3d11_stencil_op(sg_stencil_op op) {
    switch (op) {
        case SG_STENCILOP_KEEP: return D3D11_STENCIL_OP_KEEP;
        case SG_STENCILOP_ZERO: return D3D11_STENCIL_OP_ZERO;
        case SG_STENCILOP_REPLACE: return D3D11_STENCIL_OP_REPLACE;
        case SG_STENCILOP_INCR_CLAMP: return D3D11_STENCIL_OP_INCR_SAT;
        case SG_STENCILOP_DECR_CLAMP: return D3D11_STENCIL_OP_DECR_SAT;
        case SG_STENCILOP_INVERT: return D3D11_STENCIL_OP_INVERT;
        case SG_STENCILOP_INCR_WRAP: return D3D11_STENCIL_OP_INCR;
        case SG_STENCILOP_DECR_WRAP: return D3D11_STENCIL_OP_DECR;
        default: SOKOL_UNREACHABLE; return (D3D11_STENCIL_OP) 0;
    }
}
_SOKOL_PRIVATE D3D11_BLEND _sg_d3d11_blend_factor(sg_blend_factor f) {
    switch (f) {
        case SG_BLENDFACTOR_ZERO: return D3D11_BLEND_ZERO;
        case SG_BLENDFACTOR_ONE: return D3D11_BLEND_ONE;
        case SG_BLENDFACTOR_SRC_COLOR: return D3D11_BLEND_SRC_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return D3D11_BLEND_INV_SRC_COLOR;
        case SG_BLENDFACTOR_SRC_ALPHA: return D3D11_BLEND_SRC_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return D3D11_BLEND_INV_SRC_ALPHA;
        case SG_BLENDFACTOR_DST_COLOR: return D3D11_BLEND_DEST_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return D3D11_BLEND_INV_DEST_COLOR;
        case SG_BLENDFACTOR_DST_ALPHA: return D3D11_BLEND_DEST_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return D3D11_BLEND_INV_DEST_ALPHA;
        case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return D3D11_BLEND_SRC_ALPHA_SAT;
        case SG_BLENDFACTOR_BLEND_COLOR: return D3D11_BLEND_BLEND_FACTOR;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return D3D11_BLEND_INV_BLEND_FACTOR;
        case SG_BLENDFACTOR_BLEND_ALPHA: return D3D11_BLEND_BLEND_FACTOR;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return D3D11_BLEND_INV_BLEND_FACTOR;
        default: SOKOL_UNREACHABLE; return (D3D11_BLEND) 0;
    }
}
_SOKOL_PRIVATE D3D11_BLEND_OP _sg_d3d11_blend_op(sg_blend_op op) {
    switch (op) {
        case SG_BLENDOP_ADD: return D3D11_BLEND_OP_ADD;
        case SG_BLENDOP_SUBTRACT: return D3D11_BLEND_OP_SUBTRACT;
        case SG_BLENDOP_REVERSE_SUBTRACT: return D3D11_BLEND_OP_REV_SUBTRACT;
        case SG_BLENDOP_MIN: return D3D11_BLEND_OP_MIN;
        case SG_BLENDOP_MAX: return D3D11_BLEND_OP_MAX;
        default: SOKOL_UNREACHABLE; return (D3D11_BLEND_OP) 0;
    }
}
_SOKOL_PRIVATE UINT8 _sg_d3d11_color_write_mask(sg_color_mask m) {
    UINT8 res = 0;
    if (m & SG_COLORMASK_R) {
        res |= D3D11_COLOR_WRITE_ENABLE_RED;
    }
    if (m & SG_COLORMASK_G) {
        res |= D3D11_COLOR_WRITE_ENABLE_GREEN;
    }
    if (m & SG_COLORMASK_B) {
        res |= D3D11_COLOR_WRITE_ENABLE_BLUE;
    }
    if (m & SG_COLORMASK_A) {
        res |= D3D11_COLOR_WRITE_ENABLE_ALPHA;
    }
    return res;
}
_SOKOL_PRIVATE UINT _sg_d3d11_dxgi_fmt_caps(DXGI_FORMAT dxgi_fmt) {
    UINT dxgi_fmt_caps = 0;
    if (dxgi_fmt != DXGI_FORMAT_UNKNOWN) {
        HRESULT hr = _sg_d3d11_CheckFormatSupport(_sg.d3d11.dev, dxgi_fmt, &dxgi_fmt_caps);
        SOKOL_ASSERT(SUCCEEDED(hr) || (E_FAIL == hr));
        if (!SUCCEEDED(hr)) {
            dxgi_fmt_caps = 0;
        }
    }
    return dxgi_fmt_caps;
}
// see: https://docs.microsoft.com/en-us/windows/win32/direct3d11/overviews-direct3d-11-resources-limits#resource-limits-for-feature-level-11-hardware
_SOKOL_PRIVATE void _sg_d3d11_init_caps(void) {
    _sg.backend = SG_BACKEND_D3D11;
    _sg.features.origin_top_left = true;
    _sg.features.image_clamp_to_border = true;
    _sg.features.mrt_independent_blend_state = true;
    _sg.features.mrt_independent_write_mask = true;
    _sg.features.compute = true;
    _sg.features.msaa_texture_bindings = true;
    _sg.features.draw_base_vertex = true;
    _sg.features.draw_base_instance = true;
    _sg.limits.max_image_size_2d = 16 * 1024;
    _sg.limits.max_image_size_cube = 16 * 1024;
    _sg.limits.max_image_size_3d = 2 * 1024;
    _sg.limits.max_image_size_array = 16 * 1024;
    _sg.limits.max_image_array_layers = _SG_D3D11_MAX_TEXTUREARRAY_LAYERS;
    _sg.limits.max_vertex_attrs = SG_MAX_VERTEX_ATTRIBUTES;
    _sg.limits.max_color_attachments = _sg_min(8, SG_MAX_COLOR_ATTACHMENTS);
    _sg.limits.max_texture_bindings_per_stage = _sg_min(128, SG_MAX_VIEW_BINDSLOTS);
    _sg.limits.max_storage_buffer_bindings_per_stage = _sg_min(64, SG_MAX_VIEW_BINDSLOTS);
    if (_sg_d3d11_GetFeatureLevel(_sg.d3d11.dev) >= D3D_FEATURE_LEVEL_11_1) {
        _sg.limits.d3d11_max_unordered_access_views = _sg_min(64, SG_MAX_VIEW_BINDSLOTS);
    } else {
        _sg.limits.d3d11_max_unordered_access_views = _sg_min(8, SG_MAX_VIEW_BINDSLOTS);
    }
    _sg.limits.max_storage_image_bindings_per_stage = _sg.limits.d3d11_max_unordered_access_views;
    // see: https://docs.microsoft.com/en-us/windows/win32/api/d3d11/ne-d3d11-d3d11_format_support
    for (int fmt = (SG_PIXELFORMAT_NONE+1); fmt < _SG_PIXELFORMAT_NUM; fmt++) {
        const UINT srv_dxgi_fmt_caps = _sg_d3d11_dxgi_fmt_caps(_sg_d3d11_srv_pixel_format((sg_pixel_format)fmt));
        const UINT rtv_uav_dxgi_fmt_caps = _sg_d3d11_dxgi_fmt_caps(_sg_d3d11_rtv_uav_pixel_format((sg_pixel_format)fmt));
        const UINT dsv_dxgi_fmt_caps = _sg_d3d11_dxgi_fmt_caps(_sg_d3d11_dsv_pixel_format((sg_pixel_format)fmt));
        _sg_pixelformat_info_t* info = &_sg.formats[fmt];
        const bool render = 0 != (rtv_uav_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_RENDER_TARGET);
        const bool depth = 0 != (dsv_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_DEPTH_STENCIL);
        info->sample = 0 != (srv_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_TEXTURE2D);
        info->filter = 0 != (srv_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_SHADER_SAMPLE);
        info->render = render || depth;
        if (depth) {
            info->blend = 0 != (dsv_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_BLENDABLE);
            info->msaa = 0 != (dsv_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_MULTISAMPLE_RENDERTARGET);
        } else {
            info->blend = 0 != (rtv_uav_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_BLENDABLE);
            info->msaa = 0 != (rtv_uav_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_MULTISAMPLE_RENDERTARGET);
        }
        info->depth = depth;
        info->read = info->write = 0 != (rtv_uav_dxgi_fmt_caps & D3D11_FORMAT_SUPPORT_TYPED_UNORDERED_ACCESS_VIEW);
    }
}
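// The capability and format tables filled in above back the public query
// functions; a minimal sketch of how an application consumes them
// (sg_query_features() and sg_query_pixelformat() are part of the public
// sokol-gfx API):
//
//      if (sg_query_features().compute) {
//          // compute passes are supported on this backend
//      }
//      sg_pixelformat_info fmt_info = sg_query_pixelformat(SG_PIXELFORMAT_RGBA16F);
//      if (fmt_info.render && fmt_info.msaa) {
//          // RGBA16F can be used as a multisampled render target
//      }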
_SOKOL_PRIVATE void _sg_d3d11_setup_backend(const sg_desc* desc) {
    // assume _sg.d3d11 is already zero-initialized
    SOKOL_ASSERT(desc);
    SOKOL_ASSERT(desc->environment.d3d11.device);
    SOKOL_ASSERT(desc->environment.d3d11.device_context);
    _sg.d3d11.valid = true;
    _sg.d3d11.dev = (ID3D11Device*) desc->environment.d3d11.device;
    _sg.d3d11.ctx = (ID3D11DeviceContext*) desc->environment.d3d11.device_context;
    _sg_d3d11_init_caps();
    if (_sg_d3d11_GetFeatureLevel(_sg.d3d11.dev) == D3D_FEATURE_LEVEL_11_0) {
        _SG_WARN(D3D11_FEATURE_LEVEL_0_DETECTED);
    }
}
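// Hedged usage sketch (not part of the implementation): the device and
// device-context pointers consumed above are provided by the application
// through sg_setup(); 'my_device' and 'my_context' are hypothetical handles,
// e.g. obtained from D3D11CreateDeviceAndSwapChain():
//
//      sg_setup(&(sg_desc){
//          .environment = {
//              .d3d11 = {
//                  .device = (const void*) my_device,          // ID3D11Device*
//                  .device_context = (const void*) my_context, // ID3D11DeviceContext*
//              },
//          },
//      });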
_SOKOL_PRIVATE void _sg_d3d11_discard_backend(void) {
    SOKOL_ASSERT(_sg.d3d11.valid);
    _sg.d3d11.valid = false;
}
_SOKOL_PRIVATE void _sg_d3d11_clear_state(void) {
    // clear all device-context state so that resource refs don't remain stuck in the D3D device context
    _sg_d3d11_ClearState(_sg.d3d11.ctx);
}
_SOKOL_PRIVATE void _sg_d3d11_reset_state_cache(void) {
    // there's currently no state cache in the D3D11 backend, so this is a no-op
}
_SOKOL_PRIVATE sg_resource_state _sg_d3d11_create_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(buf && desc);
    SOKOL_ASSERT(!buf->d3d11.buf);
    const bool injected = (0 != desc->d3d11_buffer);
    if (injected) {
        buf->d3d11.buf = (ID3D11Buffer*) desc->d3d11_buffer;
        _sg_d3d11_AddRef(buf->d3d11.buf);
    } else {
        _SG_STRUCT(D3D11_BUFFER_DESC, d3d11_buf_desc);
        d3d11_buf_desc.ByteWidth = (UINT)buf->cmn.size;
        d3d11_buf_desc.Usage = _sg_d3d11_buffer_usage(&buf->cmn.usage);
        d3d11_buf_desc.BindFlags = _sg_d3d11_buffer_bind_flags(&buf->cmn.usage);
        d3d11_buf_desc.CPUAccessFlags = _sg_d3d11_buffer_cpu_access_flags(&buf->cmn.usage);
        d3d11_buf_desc.MiscFlags = _sg_d3d11_buffer_misc_flags(&buf->cmn.usage);
        D3D11_SUBRESOURCE_DATA* init_data_ptr = 0;
        _SG_STRUCT(D3D11_SUBRESOURCE_DATA, init_data);
        if (desc->data.ptr) {
            init_data.pSysMem = desc->data.ptr;
            init_data_ptr = &init_data;
        }
        HRESULT hr = _sg_d3d11_CreateBuffer(_sg.d3d11.dev, &d3d11_buf_desc, init_data_ptr, &buf->d3d11.buf);
        if (!(SUCCEEDED(hr) && buf->d3d11.buf)) {
            _SG_ERROR(D3D11_CREATE_BUFFER_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        _sg_d3d11_setlabel(buf->d3d11.buf, desc->label);
    }
    return SG_RESOURCESTATE_VALID;
}
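// Hedged usage sketch for the 'injected' path above: an application can wrap
// an existing native buffer instead of having sokol-gfx create one
// ('my_d3d11_buf' is a hypothetical ID3D11Buffer* owned by the application):
//
//      sg_buffer buf = sg_make_buffer(&(sg_buffer_desc){
//          .usage.vertex_buffer = true,
//          .size = 1024,
//          .d3d11_buffer = (const void*) my_d3d11_buf,  // takes the AddRef-only path
//      });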
_SOKOL_PRIVATE void _sg_d3d11_discard_buffer(_sg_buffer_t* buf) {
    SOKOL_ASSERT(buf);
    if (buf->d3d11.buf) {
        _sg_d3d11_Release(buf->d3d11.buf);
    }
}
_SOKOL_PRIVATE void _sg_d3d11_fill_subres_data(const _sg_image_t* img, const sg_image_data* data) {
    const int num_slices = (img->cmn.type == SG_IMAGETYPE_3D) ? 1 : img->cmn.num_slices;
    int subres_index = 0;
    for (int slice_index = 0; slice_index < num_slices; slice_index++) {
        for (int mip_index = 0; mip_index < img->cmn.num_mipmaps; mip_index++, subres_index++) {
            SOKOL_ASSERT(subres_index < _SG_D3D11_MAX_TEXTURE_SUBRESOURCES);
            D3D11_SUBRESOURCE_DATA* subres_data = &_sg.d3d11.subres_data[subres_index];
            const int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
            const int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
            const sg_range* miplevel_data = &(data->mip_levels[mip_index]);
            const size_t slice_size = miplevel_data->size / (size_t)num_slices;
            const size_t slice_offset = slice_size * (size_t)slice_index;
            const uint8_t* ptr = (const uint8_t*) miplevel_data->ptr;
            subres_data->pSysMem = ptr + slice_offset;
            subres_data->SysMemPitch = (UINT)_sg_row_pitch(img->cmn.pixel_format, mip_width, 1);
            if (img->cmn.type == SG_IMAGETYPE_3D) {
                subres_data->SysMemSlicePitch = (UINT)_sg_surface_pitch(img->cmn.pixel_format, mip_width, mip_height, 1);
            } else {
                subres_data->SysMemSlicePitch = 0;
            }
        }
    }
}
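// The loop above emits subresources in standard D3D11 order (all mip levels
// of slice 0, then all mip levels of slice 1, ...), i.e. the index matches
// D3D11CalcSubresource(mip_index, slice_index, num_mipmaps):
//
//      subres_index == slice_index * num_mipmaps + mip_index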
_SOKOL_PRIVATE sg_resource_state _sg_d3d11_create_image(_sg_image_t* img, const sg_image_desc* desc) {
    SOKOL_ASSERT(img && desc);
    SOKOL_ASSERT((0 == img->d3d11.tex2d) && (0 == img->d3d11.tex3d) && (0 == img->d3d11.res));
    HRESULT hr;
    const bool injected = (0 != desc->d3d11_texture);
    const bool msaa = (img->cmn.sample_count > 1);
    SOKOL_ASSERT(!(msaa && (img->cmn.type == SG_IMAGETYPE_CUBE)));
    img->d3d11.format = _sg_d3d11_texture_pixel_format(img->cmn.pixel_format);
    if (img->d3d11.format == DXGI_FORMAT_UNKNOWN) {
        _SG_ERROR(D3D11_CREATE_2D_TEXTURE_UNSUPPORTED_PIXEL_FORMAT);
        return SG_RESOURCESTATE_FAILED;
    }
    // prepare initial content pointers
    D3D11_SUBRESOURCE_DATA* init_data = 0;
    if (!injected && desc->data.mip_levels[0].ptr) {
        _sg_d3d11_fill_subres_data(img, &desc->data);
        init_data = _sg.d3d11.subres_data;
    }
    if (img->cmn.type != SG_IMAGETYPE_3D) {
        // 2D-, cube- or array-texture
        // first check for injected texture and/or resource view
        if (injected) {
            img->d3d11.tex2d = (ID3D11Texture2D*) desc->d3d11_texture;
            _sg_d3d11_AddRef(img->d3d11.tex2d);
        } else {
            // if not injected, create 2D texture
            _SG_STRUCT(D3D11_TEXTURE2D_DESC, d3d11_tex_desc);
            d3d11_tex_desc.Width = (UINT)img->cmn.width;
            d3d11_tex_desc.Height = (UINT)img->cmn.height;
            d3d11_tex_desc.MipLevels = (UINT)img->cmn.num_mipmaps;
            d3d11_tex_desc.ArraySize = (UINT)img->cmn.num_slices;
            d3d11_tex_desc.Format = img->d3d11.format;
            d3d11_tex_desc.BindFlags = _sg_d3d11_image_bind_flags(&img->cmn.usage);
            d3d11_tex_desc.Usage = _sg_d3d11_image_usage(&img->cmn.usage);
            d3d11_tex_desc.CPUAccessFlags = _sg_d3d11_image_cpu_access_flags(&img->cmn.usage);
            d3d11_tex_desc.SampleDesc.Count = (UINT)img->cmn.sample_count;
            d3d11_tex_desc.SampleDesc.Quality = (UINT) (msaa ? D3D11_STANDARD_MULTISAMPLE_PATTERN : 0);
            d3d11_tex_desc.MiscFlags = (img->cmn.type == SG_IMAGETYPE_CUBE) ? D3D11_RESOURCE_MISC_TEXTURECUBE : 0;
            hr = _sg_d3d11_CreateTexture2D(_sg.d3d11.dev, &d3d11_tex_desc, init_data, &img->d3d11.tex2d);
            if (!(SUCCEEDED(hr) && img->d3d11.tex2d)) {
                _SG_ERROR(D3D11_CREATE_2D_TEXTURE_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg_d3d11_setlabel(img->d3d11.tex2d, desc->label);
        }
        SOKOL_ASSERT(img->d3d11.tex2d);
        img->d3d11.res = (ID3D11Resource*)img->d3d11.tex2d;
        _sg_d3d11_AddRef(img->d3d11.res);
    } else {
        // 3D texture - same procedure: first check if injected, then create if not
        if (injected) {
            img->d3d11.tex3d = (ID3D11Texture3D*) desc->d3d11_texture;
            _sg_d3d11_AddRef(img->d3d11.tex3d);
        } else {
            // not injected, create 3d texture
            _SG_STRUCT(D3D11_TEXTURE3D_DESC, d3d11_tex_desc);
            d3d11_tex_desc.Width = (UINT)img->cmn.width;
            d3d11_tex_desc.Height = (UINT)img->cmn.height;
            d3d11_tex_desc.Depth = (UINT)img->cmn.num_slices;
            d3d11_tex_desc.MipLevels = (UINT)img->cmn.num_mipmaps;
            d3d11_tex_desc.Format = img->d3d11.format;
            d3d11_tex_desc.BindFlags = _sg_d3d11_image_bind_flags(&img->cmn.usage);
            d3d11_tex_desc.Usage = _sg_d3d11_image_usage(&img->cmn.usage);
            d3d11_tex_desc.CPUAccessFlags = _sg_d3d11_image_cpu_access_flags(&img->cmn.usage);
            if (img->d3d11.format == DXGI_FORMAT_UNKNOWN) {
                _SG_ERROR(D3D11_CREATE_3D_TEXTURE_UNSUPPORTED_PIXEL_FORMAT);
                return SG_RESOURCESTATE_FAILED;
            }
            hr = _sg_d3d11_CreateTexture3D(_sg.d3d11.dev, &d3d11_tex_desc, init_data, &img->d3d11.tex3d);
            if (!(SUCCEEDED(hr) && img->d3d11.tex3d)) {
                _SG_ERROR(D3D11_CREATE_3D_TEXTURE_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg_d3d11_setlabel(img->d3d11.tex3d, desc->label);
        }
        SOKOL_ASSERT(img->d3d11.tex3d);
        img->d3d11.res = (ID3D11Resource*)img->d3d11.tex3d;
        _sg_d3d11_AddRef(img->d3d11.res);
    }
    return SG_RESOURCESTATE_VALID;
}
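// As with buffers, a native texture can be injected instead of created
// ('my_d3d11_tex' is a hypothetical ID3D11Texture2D* owned by the application;
// its dimensions and pixel format must match the desc):
//
//      sg_image img = sg_make_image(&(sg_image_desc){
//          .width = 256,
//          .height = 256,
//          .pixel_format = SG_PIXELFORMAT_RGBA8,
//          .d3d11_texture = (const void*) my_d3d11_tex,
//      });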
_SOKOL_PRIVATE void _sg_d3d11_discard_image(_sg_image_t* img) {
    SOKOL_ASSERT(img);
    if (img->d3d11.tex2d) {
        _sg_d3d11_Release(img->d3d11.tex2d);
    }
    if (img->d3d11.tex3d) {
        _sg_d3d11_Release(img->d3d11.tex3d);
    }
    if (img->d3d11.res) {
        _sg_d3d11_Release(img->d3d11.res);
    }
}
_SOKOL_PRIVATE sg_resource_state _sg_d3d11_create_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
    SOKOL_ASSERT(smp && desc);
    SOKOL_ASSERT(0 == smp->d3d11.smp);
    const bool injected = (0 != desc->d3d11_sampler);
    if (injected) {
        smp->d3d11.smp = (ID3D11SamplerState*)desc->d3d11_sampler;
        _sg_d3d11_AddRef(smp->d3d11.smp);
    } else {
        _SG_STRUCT(D3D11_SAMPLER_DESC, d3d11_smp_desc);
        d3d11_smp_desc.Filter = _sg_d3d11_filter(desc->min_filter, desc->mag_filter, desc->mipmap_filter, desc->compare != SG_COMPAREFUNC_NEVER, desc->max_anisotropy);
        d3d11_smp_desc.AddressU = _sg_d3d11_address_mode(desc->wrap_u);
        d3d11_smp_desc.AddressV = _sg_d3d11_address_mode(desc->wrap_v);
        d3d11_smp_desc.AddressW = _sg_d3d11_address_mode(desc->wrap_w);
        d3d11_smp_desc.MipLODBias = 0.0f; // FIXME?
        switch (desc->border_color) {
            case SG_BORDERCOLOR_TRANSPARENT_BLACK:
                // all 0.0f
                break;
            case SG_BORDERCOLOR_OPAQUE_WHITE:
                for (int i = 0; i < 4; i++) {
                    d3d11_smp_desc.BorderColor[i] = 1.0f;
                }
                break;
            default:
                // opaque black
                d3d11_smp_desc.BorderColor[3] = 1.0f;
                break;
        }
        d3d11_smp_desc.MaxAnisotropy = desc->max_anisotropy;
        d3d11_smp_desc.ComparisonFunc = _sg_d3d11_compare_func(desc->compare);
        d3d11_smp_desc.MinLOD = desc->min_lod;
        d3d11_smp_desc.MaxLOD = desc->max_lod;
        HRESULT hr = _sg_d3d11_CreateSamplerState(_sg.d3d11.dev, &d3d11_smp_desc, &smp->d3d11.smp);
        if (!(SUCCEEDED(hr) && smp->d3d11.smp)) {
            _SG_ERROR(D3D11_CREATE_SAMPLER_STATE_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        _sg_d3d11_setlabel(smp->d3d11.smp, desc->label);
    }
    return SG_RESOURCESTATE_VALID;
}
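// Sketch of how a sampler desc maps through the translation helpers above: a
// compare func other than SG_COMPAREFUNC_NEVER selects one of the
// D3D11_FILTER_COMPARISON_* variants (the 0x80 bit) and fills ComparisonFunc,
// which is the typical setup for shadow-map sampling:
//
//      sg_sampler smp = sg_make_sampler(&(sg_sampler_desc){
//          .min_filter = SG_FILTER_LINEAR,
//          .mag_filter = SG_FILTER_LINEAR,
//          .compare = SG_COMPAREFUNC_LESS,    // => D3D11_COMPARISON_LESS
//          .wrap_u = SG_WRAP_CLAMP_TO_EDGE,   // => D3D11_TEXTURE_ADDRESS_CLAMP
//          .wrap_v = SG_WRAP_CLAMP_TO_EDGE,
//      });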
_SOKOL_PRIVATE void _sg_d3d11_discard_sampler(_sg_sampler_t* smp) {
    SOKOL_ASSERT(smp);
    if (smp->d3d11.smp) {
        _sg_d3d11_Release(smp->d3d11.smp);
    }
}
_SOKOL_PRIVATE bool _sg_d3d11_load_d3dcompiler_dll(void) {
    if ((0 == _sg.d3d11.d3dcompiler_dll) && !_sg.d3d11.d3dcompiler_dll_load_failed) {
        _sg.d3d11.d3dcompiler_dll = LoadLibraryA("d3dcompiler_47.dll");
        if (0 == _sg.d3d11.d3dcompiler_dll) {
            // don't attempt to load missing DLL in the future
            _SG_ERROR(D3D11_LOAD_D3DCOMPILER_47_DLL_FAILED);
            _sg.d3d11.d3dcompiler_dll_load_failed = true;
            return false;
        }
        // look up function pointers
        _sg.d3d11.D3DCompile_func = (pD3DCompile)(void*) GetProcAddress(_sg.d3d11.d3dcompiler_dll, "D3DCompile");
        SOKOL_ASSERT(_sg.d3d11.D3DCompile_func);
    }
    return 0 != _sg.d3d11.d3dcompiler_dll;
}
_SOKOL_PRIVATE ID3DBlob* _sg_d3d11_compile_shader(const sg_shader_function* shd_func) {
    if (!_sg_d3d11_load_d3dcompiler_dll()) {
        return NULL;
    }
    SOKOL_ASSERT(shd_func->d3d11_target);
    UINT flags1 = D3DCOMPILE_PACK_MATRIX_COLUMN_MAJOR;
    if (_sg.desc.d3d11.shader_debugging) {
        flags1 |= D3DCOMPILE_DEBUG | D3DCOMPILE_SKIP_OPTIMIZATION;
    } else {
        flags1 |= D3DCOMPILE_OPTIMIZATION_LEVEL3;
    }
    ID3DBlob* output = NULL;
    ID3DBlob* errors_or_warnings = NULL;
    HRESULT hr = _sg.d3d11.D3DCompile_func(
        shd_func->source,                           // pSrcData
        strlen(shd_func->source),                   // SrcDataSize
        shd_func->d3d11_filepath,                   // pSourceName
        NULL,                                       // pDefines
        D3D_COMPILE_STANDARD_FILE_INCLUDE,          // pInclude
        shd_func->entry ? shd_func->entry : "main", // pEntryPoint
        shd_func->d3d11_target,                     // pTarget
        flags1,                                     // Flags1
        0,                                          // Flags2
        &output,                                    // ppCode
        &errors_or_warnings);                       // ppErrorMsgs
    if (FAILED(hr)) {
        _SG_ERROR(D3D11_SHADER_COMPILATION_FAILED);
    }
    if (errors_or_warnings) {
        _SG_WARN(D3D11_SHADER_COMPILATION_OUTPUT);
        _SG_LOGMSG(D3D11_SHADER_COMPILATION_OUTPUT, (LPCSTR)_sg_d3d11_GetBufferPointer(errors_or_warnings));
        _sg_d3d11_Release(errors_or_warnings); errors_or_warnings = NULL;
    }
    if (FAILED(hr)) {
        // just in case, usually output is NULL here
        if (output) {
            _sg_d3d11_Release(output);
            output = NULL;
        }
    }
    return output;
}
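// Hedged sketch of the runtime-compilation path above: when a shader desc
// provides HLSL source instead of precompiled bytecode, the target profile
// string is passed through sg_shader_function.d3d11_target ('vs_src'/'fs_src'
// are hypothetical source strings, and the profile names are standard D3D
// profile strings shown as an assumption):
//
//      sg_shader shd = sg_make_shader(&(sg_shader_desc){
//          .vertex_func   = { .source = vs_src, .d3d11_target = "vs_5_0" },
//          .fragment_func = { .source = fs_src, .d3d11_target = "ps_5_0" },
//      });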
// NOTE: this is an out-of-range check for HLSL bindslots that's also active in release mode
_SOKOL_PRIVATE bool _sg_d3d11_ensure_hlsl_bindslot_ranges(const sg_shader_desc* desc) {
    SOKOL_ASSERT(desc);
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        const sg_shader_uniform_block* ub = &desc->uniform_blocks[i];
        if (ub->stage != SG_SHADERSTAGE_NONE) {
            if (ub->hlsl_register_b_n >= _SG_D3D11_MAX_STAGE_UB_BINDINGS) {
                _SG_ERROR(D3D11_UNIFORMBLOCK_HLSL_REGISTER_B_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const sg_shader_view* view = &desc->views[i];
        if (view->texture.stage != SG_SHADERSTAGE_NONE) {
            if (view->texture.hlsl_register_t_n >= _SG_D3D11_MAX_STAGE_SRV_BINDINGS) {
                _SG_ERROR(D3D11_IMAGE_HLSL_REGISTER_T_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_buffer.hlsl_register_t_n >= _SG_D3D11_MAX_STAGE_SRV_BINDINGS) {
                _SG_ERROR(D3D11_STORAGEBUFFER_HLSL_REGISTER_T_OUT_OF_RANGE);
                return false;
            }
            if (view->storage_buffer.hlsl_register_u_n >= _SG_D3D11_MAX_STAGE_UAV_BINDINGS) {
                _SG_ERROR(D3D11_STORAGEBUFFER_HLSL_REGISTER_U_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_image.hlsl_register_u_n >= _SG_D3D11_MAX_STAGE_UAV_BINDINGS) {
                _SG_ERROR(D3D11_STORAGEIMAGE_HLSL_REGISTER_U_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        const sg_shader_sampler* smp = &desc->samplers[i];
        if (smp->stage != SG_SHADERSTAGE_NONE) {
            if (smp->hlsl_register_s_n >= _SG_D3D11_MAX_STAGE_SMP_BINDINGS) {
                _SG_ERROR(D3D11_SAMPLER_HLSL_REGISTER_S_OUT_OF_RANGE);
                return false;
            }
        }
    }
    return true;
}
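// The bindslot numbers validated above come straight from the shader desc and
// correspond to HLSL register declarations; a minimal sketch ('vs_params_t'
// is a hypothetical uniform block struct):
//
//      .uniform_blocks[0] = {
//          .stage = SG_SHADERSTAGE_VERTEX,
//          .size = sizeof(vs_params_t),
//          .hlsl_register_b_n = 0,    // matches 'cbuffer params : register(b0)' in HLSL
//      },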
  11986. _SOKOL_PRIVATE sg_resource_state _sg_d3d11_create_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
  11987. SOKOL_ASSERT(shd && desc);
  11988. SOKOL_ASSERT(!shd->d3d11.vs && !shd->d3d11.fs && !shd->d3d11.cs && !shd->d3d11.vs_blob);
  11989. HRESULT hr;
  11990. // perform a range-check on HLSL bindslots that's also active in release
  11991. // mode to avoid potential out-of-bounds array accesses
  11992. if (!_sg_d3d11_ensure_hlsl_bindslot_ranges(desc)) {
  11993. return SG_RESOURCESTATE_FAILED;
  11994. }
  11995. // copy vertex attribute semantic names and indices
  11996. for (size_t i = 0; i < SG_MAX_VERTEX_ATTRIBUTES; i++) {
  11997. _sg_strcpy(&shd->d3d11.attrs[i].sem_name, desc->attrs[i].hlsl_sem_name);
  11998. shd->d3d11.attrs[i].sem_index = desc->attrs[i].hlsl_sem_index;
  11999. }
  12000. // copy HLSL bind slots
  12001. for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
  12002. SOKOL_ASSERT(0 == shd->d3d11.ub_register_b_n[i]);
  12003. shd->d3d11.ub_register_b_n[i] = desc->uniform_blocks[i].hlsl_register_b_n;
  12004. }
  12005. for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
  12006. const sg_shader_view* view = &desc->views[i];
  12007. SOKOL_ASSERT((0 == shd->d3d11.view_register_t_n[i]) && (0 == shd->d3d11.view_register_u_n[i]));
  12008. if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
  12009. shd->d3d11.view_register_t_n[i] = view->storage_buffer.hlsl_register_t_n;
  12010. shd->d3d11.view_register_u_n[i] = view->storage_buffer.hlsl_register_u_n;
  12011. } else if (view->texture.stage != SG_SHADERSTAGE_NONE) {
  12012. shd->d3d11.view_register_t_n[i] = view->texture.hlsl_register_t_n;
  12013. } else if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
  12014. shd->d3d11.view_register_u_n[i] = view->storage_image.hlsl_register_u_n;
  12015. }
  12016. }
  12017. for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
  12018. SOKOL_ASSERT(0 == shd->d3d11.smp_register_s_n[i]);
  12019. shd->d3d11.smp_register_s_n[i] = desc->samplers[i].hlsl_register_s_n;
  12020. }
  12021. // create a D3D constant buffer for each uniform block
    for (size_t ub_index = 0; ub_index < SG_MAX_UNIFORMBLOCK_BINDSLOTS; ub_index++) {
        const sg_shader_stage stage = desc->uniform_blocks[ub_index].stage;
        if (stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const _sg_shader_uniform_block_t* ub = &shd->cmn.uniform_blocks[ub_index];
        ID3D11Buffer* cbuf = 0;
        _SG_STRUCT(D3D11_BUFFER_DESC, cb_desc);
        cb_desc.ByteWidth = (UINT)_sg_roundup((int)ub->size, 16);
        cb_desc.Usage = D3D11_USAGE_DEFAULT;
        cb_desc.BindFlags = D3D11_BIND_CONSTANT_BUFFER;
        hr = _sg_d3d11_CreateBuffer(_sg.d3d11.dev, &cb_desc, NULL, &cbuf);
        if (!(SUCCEEDED(hr) && cbuf)) {
            _SG_ERROR(D3D11_CREATE_CONSTANT_BUFFER_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        _sg_d3d11_setlabel(cbuf, desc->label);
        shd->d3d11.all_cbufs[ub_index] = cbuf;
        const uint8_t d3d11_slot = shd->d3d11.ub_register_b_n[ub_index];
        SOKOL_ASSERT(d3d11_slot < _SG_D3D11_MAX_STAGE_UB_BINDINGS);
        if (stage == SG_SHADERSTAGE_VERTEX) {
            SOKOL_ASSERT(0 == shd->d3d11.vs_cbufs[d3d11_slot]);
            shd->d3d11.vs_cbufs[d3d11_slot] = cbuf;
        } else if (stage == SG_SHADERSTAGE_FRAGMENT) {
            SOKOL_ASSERT(0 == shd->d3d11.fs_cbufs[d3d11_slot]);
            shd->d3d11.fs_cbufs[d3d11_slot] = cbuf;
        } else if (stage == SG_SHADERSTAGE_COMPUTE) {
            SOKOL_ASSERT(0 == shd->d3d11.cs_cbufs[d3d11_slot]);
            shd->d3d11.cs_cbufs[d3d11_slot] = cbuf;
        } else {
            SOKOL_UNREACHABLE;
        }
    }
    // create shader functions
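    // NOTE: a shader object is either a render shader (vertex + fragment
    // function) or a compute shader (compute function only), this is checked
    // via vs_valid/fs_valid/cs_valid at the end of the function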
    const bool has_vs = desc->vertex_func.bytecode.ptr || desc->vertex_func.source;
    const bool has_fs = desc->fragment_func.bytecode.ptr || desc->fragment_func.source;
    const bool has_cs = desc->compute_func.bytecode.ptr || desc->compute_func.source;
    bool vs_valid = false; bool fs_valid = false; bool cs_valid = false;
    if (has_vs) {
        const void* vs_ptr = 0; SIZE_T vs_length = 0;
        ID3DBlob* vs_blob = 0;
        if (desc->vertex_func.bytecode.ptr) {
            SOKOL_ASSERT(desc->vertex_func.bytecode.size > 0);
            vs_ptr = desc->vertex_func.bytecode.ptr;
            vs_length = desc->vertex_func.bytecode.size;
        } else {
            SOKOL_ASSERT(desc->vertex_func.source);
            vs_blob = _sg_d3d11_compile_shader(&desc->vertex_func);
            if (vs_blob) {
                vs_ptr = _sg_d3d11_GetBufferPointer(vs_blob);
                vs_length = _sg_d3d11_GetBufferSize(vs_blob);
            }
        }
        if (vs_ptr && (vs_length > 0)) {
            hr = _sg_d3d11_CreateVertexShader(_sg.d3d11.dev, vs_ptr, vs_length, NULL, &shd->d3d11.vs);
            vs_valid = SUCCEEDED(hr) && shd->d3d11.vs;
        }
        // set label, and store a copy of the vertex shader blob, which is
        // needed later for input layout creation in _sg_d3d11_create_pipeline()
        if (vs_valid) {
            _sg_d3d11_setlabel(shd->d3d11.vs, desc->label);
            shd->d3d11.vs_blob_length = vs_length;
            shd->d3d11.vs_blob = _sg_malloc((size_t)vs_length);
            SOKOL_ASSERT(shd->d3d11.vs_blob);
            memcpy(shd->d3d11.vs_blob, vs_ptr, vs_length);
        }
        if (vs_blob) {
            _sg_d3d11_Release(vs_blob);
        }
    }
    if (has_fs) {
        const void* fs_ptr = 0; SIZE_T fs_length = 0;
        ID3DBlob* fs_blob = 0;
        if (desc->fragment_func.bytecode.ptr) {
            SOKOL_ASSERT(desc->fragment_func.bytecode.size > 0);
            fs_ptr = desc->fragment_func.bytecode.ptr;
            fs_length = desc->fragment_func.bytecode.size;
        } else {
            SOKOL_ASSERT(desc->fragment_func.source);
            fs_blob = _sg_d3d11_compile_shader(&desc->fragment_func);
            if (fs_blob) {
                fs_ptr = _sg_d3d11_GetBufferPointer(fs_blob);
                fs_length = _sg_d3d11_GetBufferSize(fs_blob);
            }
        }
        if (fs_ptr && (fs_length > 0)) {
            hr = _sg_d3d11_CreatePixelShader(_sg.d3d11.dev, fs_ptr, fs_length, NULL, &shd->d3d11.fs);
            fs_valid = SUCCEEDED(hr) && shd->d3d11.fs;
        }
        if (fs_valid) {
            _sg_d3d11_setlabel(shd->d3d11.fs, desc->label);
        }
        if (fs_blob) {
            _sg_d3d11_Release(fs_blob);
        }
    }
    if (has_cs) {
        const void* cs_ptr = 0; SIZE_T cs_length = 0;
        ID3DBlob* cs_blob = 0;
        if (desc->compute_func.bytecode.ptr) {
            SOKOL_ASSERT(desc->compute_func.bytecode.size > 0);
            cs_ptr = desc->compute_func.bytecode.ptr;
            cs_length = desc->compute_func.bytecode.size;
        } else {
            SOKOL_ASSERT(desc->compute_func.source);
            cs_blob = _sg_d3d11_compile_shader(&desc->compute_func);
            if (cs_blob) {
                cs_ptr = _sg_d3d11_GetBufferPointer(cs_blob);
                cs_length = _sg_d3d11_GetBufferSize(cs_blob);
            }
        }
        if (cs_ptr && (cs_length > 0)) {
            hr = _sg_d3d11_CreateComputeShader(_sg.d3d11.dev, cs_ptr, cs_length, NULL, &shd->d3d11.cs);
            cs_valid = SUCCEEDED(hr) && shd->d3d11.cs;
        }
        if (cs_blob) {
            _sg_d3d11_Release(cs_blob);
        }
    }
    if ((vs_valid && fs_valid) || cs_valid) {
        return SG_RESOURCESTATE_VALID;
    } else {
        return SG_RESOURCESTATE_FAILED;
    }
}
_SOKOL_PRIVATE void _sg_d3d11_discard_shader(_sg_shader_t* shd) {
    SOKOL_ASSERT(shd);
    if (shd->d3d11.vs) {
        _sg_d3d11_Release(shd->d3d11.vs);
    }
    if (shd->d3d11.fs) {
        _sg_d3d11_Release(shd->d3d11.fs);
    }
    if (shd->d3d11.cs) {
        _sg_d3d11_Release(shd->d3d11.cs);
    }
    if (shd->d3d11.vs_blob) {
        _sg_free(shd->d3d11.vs_blob);
    }
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        if (shd->d3d11.all_cbufs[i]) {
            _sg_d3d11_Release(shd->d3d11.all_cbufs[i]);
        }
    }
}
_SOKOL_PRIVATE sg_resource_state _sg_d3d11_create_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(pip && desc);
    _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    // if this is a compute pipeline, we're done here
    if (pip->cmn.is_compute) {
        return SG_RESOURCESTATE_VALID;
    }
    // a render pipeline...
    SOKOL_ASSERT(shd->d3d11.vs_blob && shd->d3d11.vs_blob_length > 0);
    SOKOL_ASSERT(!pip->d3d11.il && !pip->d3d11.rs && !pip->d3d11.dss && !pip->d3d11.bs);
    pip->d3d11.index_format = _sg_d3d11_index_format(pip->cmn.index_type);
    pip->d3d11.topology = _sg_d3d11_primitive_topology(desc->primitive_type);
    pip->d3d11.stencil_ref = desc->stencil.ref;
    // create input layout object
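    // NOTE: the input layout is created against the vertex shader bytecode
    // that was copied aside in _sg_d3d11_create_shader(), D3D11 validates
    // the vertex attribute semantics against the shader's input signature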
    HRESULT hr;
    _SG_STRUCT(D3D11_INPUT_ELEMENT_DESC, d3d11_comps[SG_MAX_VERTEX_ATTRIBUTES]);
    size_t attr_index = 0;
    for (; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
        const sg_vertex_attr_state* a_state = &desc->layout.attrs[attr_index];
        if (a_state->format == SG_VERTEXFORMAT_INVALID) {
            break;
        }
        SOKOL_ASSERT(a_state->buffer_index < SG_MAX_VERTEXBUFFER_BINDSLOTS);
        SOKOL_ASSERT(pip->cmn.vertex_buffer_layout_active[a_state->buffer_index]);
        const sg_vertex_buffer_layout_state* l_state = &desc->layout.buffers[a_state->buffer_index];
        const sg_vertex_step step_func = l_state->step_func;
        const int step_rate = l_state->step_rate;
        D3D11_INPUT_ELEMENT_DESC* d3d11_comp = &d3d11_comps[attr_index];
        d3d11_comp->SemanticName = _sg_strptr(&shd->d3d11.attrs[attr_index].sem_name);
        d3d11_comp->SemanticIndex = (UINT)shd->d3d11.attrs[attr_index].sem_index;
        d3d11_comp->Format = _sg_d3d11_vertex_format(a_state->format);
        d3d11_comp->InputSlot = (UINT)a_state->buffer_index;
        d3d11_comp->AlignedByteOffset = (UINT)a_state->offset;
        d3d11_comp->InputSlotClass = _sg_d3d11_input_classification(step_func);
        if (SG_VERTEXSTEP_PER_INSTANCE == step_func) {
            d3d11_comp->InstanceDataStepRate = (UINT)step_rate;
        }
    }
    for (size_t layout_index = 0; layout_index < SG_MAX_VERTEXBUFFER_BINDSLOTS; layout_index++) {
        if (pip->cmn.vertex_buffer_layout_active[layout_index]) {
            const sg_vertex_buffer_layout_state* l_state = &desc->layout.buffers[layout_index];
            SOKOL_ASSERT(l_state->stride > 0);
            pip->d3d11.vb_strides[layout_index] = (UINT)l_state->stride;
        } else {
            pip->d3d11.vb_strides[layout_index] = 0;
        }
    }
    if (attr_index > 0) {
        hr = _sg_d3d11_CreateInputLayout(_sg.d3d11.dev,
            d3d11_comps,                // pInputElementDesc
            (UINT)attr_index,           // NumElements
            shd->d3d11.vs_blob,         // pShaderByteCodeWithInputSignature
            shd->d3d11.vs_blob_length,  // BytecodeLength
            &pip->d3d11.il);
        if (!(SUCCEEDED(hr) && pip->d3d11.il)) {
            _SG_ERROR(D3D11_CREATE_INPUT_LAYOUT_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        _sg_d3d11_setlabel(pip->d3d11.il, desc->label);
    }
    // create rasterizer state
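    // NOTE: sokol-gfx hard-wires solid fill mode, and scissor testing is
    // always enabled (each pass starts with the scissor rect covering the
    // whole render target, see _sg_d3d11_begin_pass())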
    _SG_STRUCT(D3D11_RASTERIZER_DESC, rs_desc);
    rs_desc.FillMode = D3D11_FILL_SOLID;
    rs_desc.CullMode = _sg_d3d11_cull_mode(desc->cull_mode);
    rs_desc.FrontCounterClockwise = desc->face_winding == SG_FACEWINDING_CCW;
    rs_desc.DepthBias = (INT) pip->cmn.depth.bias;
    rs_desc.DepthBiasClamp = pip->cmn.depth.bias_clamp;
    rs_desc.SlopeScaledDepthBias = pip->cmn.depth.bias_slope_scale;
    rs_desc.DepthClipEnable = TRUE;
    rs_desc.ScissorEnable = TRUE;
    rs_desc.MultisampleEnable = desc->sample_count > 1;
    rs_desc.AntialiasedLineEnable = FALSE;
    hr = _sg_d3d11_CreateRasterizerState(_sg.d3d11.dev, &rs_desc, &pip->d3d11.rs);
    if (!(SUCCEEDED(hr) && pip->d3d11.rs)) {
        _SG_ERROR(D3D11_CREATE_RASTERIZER_STATE_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    _sg_d3d11_setlabel(pip->d3d11.rs, desc->label);
    // create depth-stencil state
    _SG_STRUCT(D3D11_DEPTH_STENCIL_DESC, dss_desc);
    dss_desc.DepthEnable = TRUE;
    dss_desc.DepthWriteMask = desc->depth.write_enabled ? D3D11_DEPTH_WRITE_MASK_ALL : D3D11_DEPTH_WRITE_MASK_ZERO;
    dss_desc.DepthFunc = _sg_d3d11_compare_func(desc->depth.compare);
    dss_desc.StencilEnable = desc->stencil.enabled;
    dss_desc.StencilReadMask = desc->stencil.read_mask;
    dss_desc.StencilWriteMask = desc->stencil.write_mask;
    const sg_stencil_face_state* sf = &desc->stencil.front;
    dss_desc.FrontFace.StencilFailOp = _sg_d3d11_stencil_op(sf->fail_op);
    dss_desc.FrontFace.StencilDepthFailOp = _sg_d3d11_stencil_op(sf->depth_fail_op);
    dss_desc.FrontFace.StencilPassOp = _sg_d3d11_stencil_op(sf->pass_op);
    dss_desc.FrontFace.StencilFunc = _sg_d3d11_compare_func(sf->compare);
    const sg_stencil_face_state* sb = &desc->stencil.back;
    dss_desc.BackFace.StencilFailOp = _sg_d3d11_stencil_op(sb->fail_op);
    dss_desc.BackFace.StencilDepthFailOp = _sg_d3d11_stencil_op(sb->depth_fail_op);
    dss_desc.BackFace.StencilPassOp = _sg_d3d11_stencil_op(sb->pass_op);
    dss_desc.BackFace.StencilFunc = _sg_d3d11_compare_func(sb->compare);
    hr = _sg_d3d11_CreateDepthStencilState(_sg.d3d11.dev, &dss_desc, &pip->d3d11.dss);
    if (!(SUCCEEDED(hr) && pip->d3d11.dss)) {
        _SG_ERROR(D3D11_CREATE_DEPTH_STENCIL_STATE_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    _sg_d3d11_setlabel(pip->d3d11.dss, desc->label);
    // create blend state
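    // NOTE: D3D11 supports at most 8 simultaneous render targets
    // (D3D11_SIMULTANEOUS_RENDER_TARGET_COUNT), the blend state for any
    // slots beyond desc->color_count is set to a non-blending default below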
    _SG_STRUCT(D3D11_BLEND_DESC, bs_desc);
    bs_desc.AlphaToCoverageEnable = desc->alpha_to_coverage_enabled;
    bs_desc.IndependentBlendEnable = TRUE;
    {
        size_t i = 0;
        for (i = 0; i < (size_t)desc->color_count; i++) {
            const sg_blend_state* src = &desc->colors[i].blend;
            D3D11_RENDER_TARGET_BLEND_DESC* dst = &bs_desc.RenderTarget[i];
            dst->BlendEnable = src->enabled;
            dst->SrcBlend = _sg_d3d11_blend_factor(src->src_factor_rgb);
            dst->DestBlend = _sg_d3d11_blend_factor(src->dst_factor_rgb);
            dst->BlendOp = _sg_d3d11_blend_op(src->op_rgb);
            dst->SrcBlendAlpha = _sg_d3d11_blend_factor(src->src_factor_alpha);
            dst->DestBlendAlpha = _sg_d3d11_blend_factor(src->dst_factor_alpha);
            dst->BlendOpAlpha = _sg_d3d11_blend_op(src->op_alpha);
            dst->RenderTargetWriteMask = _sg_d3d11_color_write_mask(desc->colors[i].write_mask);
        }
        for (; i < 8; i++) {
            D3D11_RENDER_TARGET_BLEND_DESC* dst = &bs_desc.RenderTarget[i];
            dst->BlendEnable = FALSE;
            dst->SrcBlend = dst->SrcBlendAlpha = D3D11_BLEND_ONE;
            dst->DestBlend = dst->DestBlendAlpha = D3D11_BLEND_ZERO;
            dst->BlendOp = dst->BlendOpAlpha = D3D11_BLEND_OP_ADD;
            dst->RenderTargetWriteMask = D3D11_COLOR_WRITE_ENABLE_ALL;
        }
    }
    hr = _sg_d3d11_CreateBlendState(_sg.d3d11.dev, &bs_desc, &pip->d3d11.bs);
    if (!(SUCCEEDED(hr) && pip->d3d11.bs)) {
        _SG_ERROR(D3D11_CREATE_BLEND_STATE_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    _sg_d3d11_setlabel(pip->d3d11.bs, desc->label);
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_d3d11_discard_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    if (pip->d3d11.il) {
        _sg_d3d11_Release(pip->d3d11.il);
    }
    if (pip->d3d11.rs) {
        _sg_d3d11_Release(pip->d3d11.rs);
    }
    if (pip->d3d11.dss) {
        _sg_d3d11_Release(pip->d3d11.dss);
    }
    if (pip->d3d11.bs) {
        _sg_d3d11_Release(pip->d3d11.bs);
    }
}
_SOKOL_PRIVATE sg_resource_state _sg_d3d11_create_view(_sg_view_t* view, const sg_view_desc* desc) {
    SOKOL_ASSERT(view && desc);
    _SOKOL_UNUSED(desc);
    HRESULT hr;
    if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
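        // NOTE: storage buffers are exposed as raw buffer views (the RAW flag
        // maps to HLSL ByteAddressBuffer/RWByteAddressBuffer), hence the
        // DXGI_FORMAT_R32_TYPELESS format and 4-byte element granularity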
        const _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
        SOKOL_ASSERT(buf->d3d11.buf);
        const UINT size = (UINT) buf->cmn.size;
        SOKOL_ASSERT(_sg_multiple_u64(size, 4));
        const UINT offset = (UINT) view->cmn.buf.offset;
        SOKOL_ASSERT(_sg_multiple_u64(offset, 4));
        SOKOL_ASSERT(offset < size);
        const UINT first_element = offset / 4;
        const UINT num_elements = (size - offset) / 4;
        _SG_STRUCT(D3D11_SHADER_RESOURCE_VIEW_DESC, d3d11_srv_desc);
        d3d11_srv_desc.Format = DXGI_FORMAT_R32_TYPELESS;
        d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_BUFFEREX;
        d3d11_srv_desc.BufferEx.FirstElement = first_element;
        d3d11_srv_desc.BufferEx.NumElements = num_elements;
        d3d11_srv_desc.BufferEx.Flags = D3D11_BUFFEREX_SRV_FLAG_RAW;
        SOKOL_ASSERT(!view->d3d11.srv);
        hr = _sg_d3d11_CreateShaderResourceView(_sg.d3d11.dev, (ID3D11Resource*)buf->d3d11.buf, &d3d11_srv_desc, &view->d3d11.srv);
        if (!(SUCCEEDED(hr) && view->d3d11.srv)) {
            _SG_ERROR(D3D11_CREATE_BUFFER_SRV_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        _sg_d3d11_setlabel(view->d3d11.srv, desc->label);
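        // NOTE: a UAV is only created for immutable buffers, since
        // CPU-updatable buffers are D3D11_USAGE_DYNAMIC, and dynamic
        // resources cannot be bound as unordered access views in D3D11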
        if (buf->cmn.usage.immutable) {
            _SG_STRUCT(D3D11_UNORDERED_ACCESS_VIEW_DESC, d3d11_uav_desc);
            d3d11_uav_desc.Format = DXGI_FORMAT_R32_TYPELESS;
            d3d11_uav_desc.ViewDimension = D3D11_UAV_DIMENSION_BUFFER;
            d3d11_uav_desc.Buffer.FirstElement = first_element;
            d3d11_uav_desc.Buffer.NumElements = num_elements;
            d3d11_uav_desc.Buffer.Flags = D3D11_BUFFER_UAV_FLAG_RAW;
            SOKOL_ASSERT(!view->d3d11.uav);
            hr = _sg_d3d11_CreateUnorderedAccessView(_sg.d3d11.dev, (ID3D11Resource*)buf->d3d11.buf, &d3d11_uav_desc, &view->d3d11.uav);
            if (!(SUCCEEDED(hr) && view->d3d11.uav)) {
                _SG_ERROR(D3D11_CREATE_BUFFER_UAV_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg_d3d11_setlabel(view->d3d11.uav, desc->label);
        }
    } else {
        // it's an image view
        const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
        SOKOL_ASSERT(img->d3d11.res);
        const bool msaa = img->cmn.sample_count > 1;
        SOKOL_ASSERT(view->cmn.img.mip_level_count >= 1);
        SOKOL_ASSERT(view->cmn.img.slice_count >= 1);
        const UINT mip_level = (UINT)view->cmn.img.mip_level;
        const UINT mip_count = (UINT)view->cmn.img.mip_level_count;
        const UINT slice = (UINT)view->cmn.img.slice;
        const UINT slice_count = (UINT)view->cmn.img.slice_count;
        if (view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE) {
            SOKOL_ASSERT(!msaa);
            _SG_STRUCT(D3D11_UNORDERED_ACCESS_VIEW_DESC, d3d11_uav_desc);
            d3d11_uav_desc.Format = _sg_d3d11_rtv_uav_pixel_format(img->cmn.pixel_format);
            switch (img->cmn.type) {
                case SG_IMAGETYPE_2D:
                    d3d11_uav_desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2D;
                    d3d11_uav_desc.Texture2D.MipSlice = mip_level;
                    break;
                case SG_IMAGETYPE_CUBE:
                case SG_IMAGETYPE_ARRAY:
                    d3d11_uav_desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE2DARRAY;
                    d3d11_uav_desc.Texture2DArray.MipSlice = mip_level;
                    d3d11_uav_desc.Texture2DArray.FirstArraySlice = slice;
                    d3d11_uav_desc.Texture2DArray.ArraySize = 1;
                    break;
                case SG_IMAGETYPE_3D:
                    d3d11_uav_desc.ViewDimension = D3D11_UAV_DIMENSION_TEXTURE3D;
                    d3d11_uav_desc.Texture3D.MipSlice = mip_level;
                    d3d11_uav_desc.Texture3D.FirstWSlice = slice;
                    d3d11_uav_desc.Texture3D.WSize = 1;
                    break;
                default: SOKOL_UNREACHABLE; break;
            }
            hr = _sg_d3d11_CreateUnorderedAccessView(_sg.d3d11.dev, img->d3d11.res, &d3d11_uav_desc, &view->d3d11.uav);
            if (!(SUCCEEDED(hr) && view->d3d11.uav)) {
                _SG_ERROR(D3D11_CREATE_UAV_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg_d3d11_setlabel(view->d3d11.uav, desc->label);
        } else if (view->cmn.type == SG_VIEWTYPE_TEXTURE) {
            _SG_STRUCT(D3D11_SHADER_RESOURCE_VIEW_DESC, d3d11_srv_desc);
            d3d11_srv_desc.Format = _sg_d3d11_srv_pixel_format(img->cmn.pixel_format);
            switch (img->cmn.type) {
                case SG_IMAGETYPE_2D:
                    if (msaa) {
                        d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DMS;
                    } else {
                        d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D;
                        d3d11_srv_desc.Texture2D.MostDetailedMip = mip_level;
                        d3d11_srv_desc.Texture2D.MipLevels = mip_count;
                    }
                    break;
                case SG_IMAGETYPE_CUBE:
                    SOKOL_ASSERT(!msaa);
                    d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURECUBE;
                    d3d11_srv_desc.TextureCube.MostDetailedMip = mip_level;
                    d3d11_srv_desc.TextureCube.MipLevels = mip_count;
                    break;
                case SG_IMAGETYPE_ARRAY:
                    if (msaa) {
                        // NOTE: _sg_validate_image_desc() currently disallows MSAA array textures
                        d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DMSARRAY;
                        d3d11_srv_desc.Texture2DMSArray.FirstArraySlice = slice;
                        d3d11_srv_desc.Texture2DMSArray.ArraySize = slice_count;
                    } else {
                        d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2DARRAY;
                        d3d11_srv_desc.Texture2DArray.MostDetailedMip = mip_level;
                        d3d11_srv_desc.Texture2DArray.MipLevels = mip_count;
                        d3d11_srv_desc.Texture2DArray.FirstArraySlice = slice;
                        d3d11_srv_desc.Texture2DArray.ArraySize = slice_count;
                    }
                    break;
                case SG_IMAGETYPE_3D:
                    SOKOL_ASSERT(!msaa);
                    d3d11_srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE3D;
                    d3d11_srv_desc.Texture3D.MostDetailedMip = mip_level;
                    d3d11_srv_desc.Texture3D.MipLevels = mip_count;
                    break;
                default:
                    SOKOL_UNREACHABLE; break;
            }
            hr = _sg_d3d11_CreateShaderResourceView(_sg.d3d11.dev, img->d3d11.res, &d3d11_srv_desc, &view->d3d11.srv);
            if (!(SUCCEEDED(hr) && view->d3d11.srv)) {
                _SG_ERROR(D3D11_CREATE_2D_SRV_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg_d3d11_setlabel(view->d3d11.srv, desc->label);
        } else if (view->cmn.type == SG_VIEWTYPE_COLORATTACHMENT) {
            _SG_STRUCT(D3D11_RENDER_TARGET_VIEW_DESC, d3d11_rtv_desc);
            d3d11_rtv_desc.Format = _sg_d3d11_rtv_uav_pixel_format(img->cmn.pixel_format);
            switch (img->cmn.type) {
                case SG_IMAGETYPE_2D:
                    if (msaa) {
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DMS;
                    } else {
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
                        d3d11_rtv_desc.Texture2D.MipSlice = mip_level;
                    }
                    break;
                case SG_IMAGETYPE_CUBE:
                case SG_IMAGETYPE_ARRAY:
                    if (msaa) {
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DMSARRAY;
                        d3d11_rtv_desc.Texture2DMSArray.FirstArraySlice = slice;
                        d3d11_rtv_desc.Texture2DMSArray.ArraySize = 1;
                    } else {
                        d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2DARRAY;
                        d3d11_rtv_desc.Texture2DArray.MipSlice = mip_level;
                        d3d11_rtv_desc.Texture2DArray.FirstArraySlice = slice;
                        d3d11_rtv_desc.Texture2DArray.ArraySize = 1;
                    }
                    break;
                case SG_IMAGETYPE_3D:
                    SOKOL_ASSERT(!msaa);
                    d3d11_rtv_desc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE3D;
                    d3d11_rtv_desc.Texture3D.MipSlice = mip_level;
                    d3d11_rtv_desc.Texture3D.FirstWSlice = slice;
                    d3d11_rtv_desc.Texture3D.WSize = 1;
                    break;
                default: SOKOL_UNREACHABLE; break;
            }
            hr = _sg_d3d11_CreateRenderTargetView(_sg.d3d11.dev, img->d3d11.res, &d3d11_rtv_desc, &view->d3d11.rtv);
            if (!(SUCCEEDED(hr) && view->d3d11.rtv)) {
                _SG_ERROR(D3D11_CREATE_RTV_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg_d3d11_setlabel(view->d3d11.rtv, desc->label);
        } else if (view->cmn.type == SG_VIEWTYPE_DEPTHSTENCILATTACHMENT) {
            SOKOL_ASSERT(img->cmn.type != SG_IMAGETYPE_3D);
            _SG_STRUCT(D3D11_DEPTH_STENCIL_VIEW_DESC, d3d11_dsv_desc);
            d3d11_dsv_desc.Format = _sg_d3d11_dsv_pixel_format(img->cmn.pixel_format);
            switch (img->cmn.type) {
                case SG_IMAGETYPE_2D:
                    if (msaa) {
                        d3d11_dsv_desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMS;
                    } else {
                        d3d11_dsv_desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
                        d3d11_dsv_desc.Texture2D.MipSlice = mip_level;
                    }
                    break;
                case SG_IMAGETYPE_CUBE:
                case SG_IMAGETYPE_ARRAY:
                    if (msaa) {
                        d3d11_dsv_desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DMSARRAY;
                        d3d11_dsv_desc.Texture2DMSArray.FirstArraySlice = slice;
                        d3d11_dsv_desc.Texture2DMSArray.ArraySize = 1;
                    } else {
                        d3d11_dsv_desc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2DARRAY;
                        d3d11_dsv_desc.Texture2DArray.MipSlice = mip_level;
                        d3d11_dsv_desc.Texture2DArray.FirstArraySlice = slice;
                        d3d11_dsv_desc.Texture2DArray.ArraySize = 1;
                    }
                    break;
                default: SOKOL_UNREACHABLE; break;
            }
            hr = _sg_d3d11_CreateDepthStencilView(_sg.d3d11.dev, img->d3d11.res, &d3d11_dsv_desc, &view->d3d11.dsv);
            if (!(SUCCEEDED(hr) && view->d3d11.dsv)) {
                _SG_ERROR(D3D11_CREATE_DSV_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg_d3d11_setlabel(view->d3d11.dsv, desc->label);
        }
    }
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_d3d11_discard_view(_sg_view_t* view) {
    SOKOL_ASSERT(view);
    if (view->d3d11.srv) {
        _sg_d3d11_Release(view->d3d11.srv);
    }
    if (view->d3d11.uav) {
        _sg_d3d11_Release(view->d3d11.uav);
    }
    if (view->d3d11.rtv) {
        _sg_d3d11_Release(view->d3d11.rtv);
    }
    if (view->d3d11.dsv) {
        _sg_d3d11_Release(view->d3d11.dsv);
    }
}
_SOKOL_PRIVATE void _sg_d3d11_begin_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(_sg.d3d11.ctx && pass && atts);
    if (_sg.cur_pass.is_compute) {
        // nothing to do in compute passes
        return;
    }
    int num_rtvs = 0;
    ID3D11RenderTargetView* rtvs[SG_MAX_COLOR_ATTACHMENTS] = { 0 };
    ID3D11DepthStencilView* dsv = 0;
    _sg.d3d11.cur_swapchain.render_view = 0;
    _sg.d3d11.cur_swapchain.resolve_view = 0;
    if (!atts->empty) {
        SOKOL_ASSERT(atts->num_color_views <= SG_MAX_COLOR_ATTACHMENTS);
        num_rtvs = atts->num_color_views;
        for (int i = 0; i < num_rtvs; i++) {
            SOKOL_ASSERT(atts->color_views[i]);
            SOKOL_ASSERT(atts->color_views[i]->d3d11.rtv);
            rtvs[i] = atts->color_views[i]->d3d11.rtv;
        }
        if (atts->ds_view) {
            SOKOL_ASSERT(atts->ds_view->d3d11.dsv);
            dsv = atts->ds_view->d3d11.dsv;
        }
    } else {
        // NOTE: swapchain depth-stencil-view is optional
        const sg_swapchain* swapchain = &pass->swapchain;
        SOKOL_ASSERT(swapchain->d3d11.render_view);
        num_rtvs = 1;
        rtvs[0] = (ID3D11RenderTargetView*) swapchain->d3d11.render_view;
        dsv = (ID3D11DepthStencilView*) swapchain->d3d11.depth_stencil_view;
        _sg.d3d11.cur_swapchain.render_view = (ID3D11RenderTargetView*) swapchain->d3d11.render_view;
        _sg.d3d11.cur_swapchain.resolve_view = (ID3D11RenderTargetView*) swapchain->d3d11.resolve_view;
    }
    // apply the render-target- and depth-stencil-views
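    // NOTE: always passing SG_MAX_COLOR_ATTACHMENTS (with the unused array
    // entries zeroed) also unbinds any leftover render targets from a previous pass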
    _sg_d3d11_OMSetRenderTargets(_sg.d3d11.ctx, SG_MAX_COLOR_ATTACHMENTS, rtvs, dsv);
    _sg_stats_inc(d3d11.pass.num_om_set_render_targets);
    // set viewport and scissor rect to cover whole screen
    _SG_STRUCT(D3D11_VIEWPORT, vp);
    vp.Width = (FLOAT) _sg.cur_pass.dim.width;
    vp.Height = (FLOAT) _sg.cur_pass.dim.height;
    vp.MaxDepth = 1.0f;
    _sg_d3d11_RSSetViewports(_sg.d3d11.ctx, 1, &vp);
    D3D11_RECT rect;
    rect.left = 0;
    rect.top = 0;
    rect.right = _sg.cur_pass.dim.width;
    rect.bottom = _sg.cur_pass.dim.height;
    _sg_d3d11_RSSetScissorRects(_sg.d3d11.ctx, 1, &rect);
    // perform clear action
    const sg_pass_action* action = &pass->action;
    for (size_t i = 0; i < (size_t)num_rtvs; i++) {
        if (action->colors[i].load_action == SG_LOADACTION_CLEAR) {
            _sg_d3d11_ClearRenderTargetView(_sg.d3d11.ctx, rtvs[i], (float*)&action->colors[i].clear_value);
            _sg_stats_inc(d3d11.pass.num_clear_render_target_view);
        }
    }
    UINT ds_flags = 0;
    if (action->depth.load_action == SG_LOADACTION_CLEAR) {
        ds_flags |= D3D11_CLEAR_DEPTH;
    }
    if (action->stencil.load_action == SG_LOADACTION_CLEAR) {
        ds_flags |= D3D11_CLEAR_STENCIL;
    }
    if ((0 != ds_flags) && dsv) {
        _sg_d3d11_ClearDepthStencilView(_sg.d3d11.ctx, dsv, ds_flags, action->depth.clear_value, action->stencil.clear_value);
        _sg_stats_inc(d3d11.pass.num_clear_depth_stencil_view);
    }
}
// D3D11CalcSubresource only exists for C++
_SOKOL_PRIVATE UINT _sg_d3d11_calcsubresource(UINT mip_slice, UINT array_slice, UINT mip_levels) {
    return mip_slice + array_slice * mip_levels;
}
_SOKOL_PRIVATE void _sg_d3d11_end_pass(const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(_sg.d3d11.ctx && atts);
    if (!_sg.cur_pass.is_compute) {
        // need to resolve MSAA render attachments into texture?
        if (!atts->empty) {
            // ...for offscreen pass...
            for (int i = 0; i < atts->num_color_views; i++) {
                const _sg_view_t* resolve_view = atts->resolve_views[i];
                if (resolve_view) {
                    const _sg_image_t* resolve_img = _sg_image_ref_ptr(&resolve_view->cmn.img.ref);
                    const _sg_view_t* color_view = atts->color_views[i];
                    SOKOL_ASSERT(color_view);
                    const _sg_image_t* color_img = _sg_image_ref_ptr(&color_view->cmn.img.ref);
                    SOKOL_ASSERT(color_img->cmn.sample_count > 1);
                    SOKOL_ASSERT(resolve_img->cmn.sample_count == 1);
                    const UINT src_subres = _sg_d3d11_calcsubresource(
                        (UINT)color_view->cmn.img.mip_level,
                        (UINT)color_view->cmn.img.slice,
                        (UINT)color_img->cmn.num_mipmaps);
                    const UINT dst_subres = _sg_d3d11_calcsubresource(
                        (UINT)resolve_view->cmn.img.mip_level,
                        (UINT)resolve_view->cmn.img.slice,
                        (UINT)resolve_img->cmn.num_mipmaps);
                    _sg_d3d11_ResolveSubresource(_sg.d3d11.ctx,
                        resolve_img->d3d11.res,
                        dst_subres,
                        color_img->d3d11.res,
                        src_subres,
                        color_img->d3d11.format);
                    _sg_stats_inc(d3d11.pass.num_resolve_subresource);
                }
            }
        } else {
            // ...for swapchain pass...
            if (_sg.d3d11.cur_swapchain.resolve_view) {
                SOKOL_ASSERT(_sg.d3d11.cur_swapchain.render_view);
                SOKOL_ASSERT(_sg.cur_pass.swapchain.sample_count > 1);
                SOKOL_ASSERT(_sg.cur_pass.swapchain.color_fmt > SG_PIXELFORMAT_NONE);
                ID3D11Resource* d3d11_render_res = 0;
                ID3D11Resource* d3d11_resolve_res = 0;
                _sg_d3d11_GetResource((ID3D11View*)_sg.d3d11.cur_swapchain.render_view, &d3d11_render_res);
                _sg_d3d11_GetResource((ID3D11View*)_sg.d3d11.cur_swapchain.resolve_view, &d3d11_resolve_res);
                SOKOL_ASSERT(d3d11_render_res);
                SOKOL_ASSERT(d3d11_resolve_res);
                const sg_pixel_format color_fmt = _sg.cur_pass.swapchain.color_fmt;
                _sg_d3d11_ResolveSubresource(_sg.d3d11.ctx, d3d11_resolve_res, 0, d3d11_render_res, 0, _sg_d3d11_rtv_uav_pixel_format(color_fmt));
                _sg_d3d11_Release(d3d11_render_res);
                _sg_d3d11_Release(d3d11_resolve_res);
                _sg_stats_inc(d3d11.pass.num_resolve_subresource);
            }
        }
    }
    _sg.d3d11.cur_swapchain.render_view = 0;
    _sg.d3d11.cur_swapchain.resolve_view = 0;
    _sg_d3d11_clear_state();
}
_SOKOL_PRIVATE void _sg_d3d11_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
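    // NOTE: D3D11's viewport and scissor origin is top-left, when the caller
    // uses a bottom-left origin (GL convention) the y coordinate must be
    // flipped (same in _sg_d3d11_apply_scissor_rect() below)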
    SOKOL_ASSERT(_sg.d3d11.ctx);
    D3D11_VIEWPORT vp;
    vp.TopLeftX = (FLOAT) x;
    vp.TopLeftY = (FLOAT) (origin_top_left ? y : (_sg.cur_pass.dim.height - (y + h)));
    vp.Width = (FLOAT) w;
    vp.Height = (FLOAT) h;
    vp.MinDepth = 0.0f;
    vp.MaxDepth = 1.0f;
    _sg_d3d11_RSSetViewports(_sg.d3d11.ctx, 1, &vp);
}
_SOKOL_PRIVATE void _sg_d3d11_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
    SOKOL_ASSERT(_sg.d3d11.ctx);
    D3D11_RECT rect;
    rect.left = x;
    rect.top = (origin_top_left ? y : (_sg.cur_pass.dim.height - (y + h)));
    rect.right = x + w;
    rect.bottom = origin_top_left ? (y + h) : (_sg.cur_pass.dim.height - y);
    _sg_d3d11_RSSetScissorRects(_sg.d3d11.ctx, 1, &rect);
}
_SOKOL_PRIVATE void _sg_d3d11_apply_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    SOKOL_ASSERT(_sg.d3d11.ctx);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
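    // NOTE: the per-stage constant buffers are bound once here and then stay
    // bound, sg_apply_uniforms() only updates their content via
    // UpdateSubresource() (see _sg_d3d11_apply_uniforms())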
    if (pip->cmn.is_compute) {
        // a compute pipeline
        SOKOL_ASSERT(shd->d3d11.cs);
        _sg_d3d11_CSSetShader(_sg.d3d11.ctx, shd->d3d11.cs, NULL, 0);
        _sg_d3d11_CSSetConstantBuffers(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_UB_BINDINGS, shd->d3d11.cs_cbufs);
        _sg_stats_inc(d3d11.pipeline.num_cs_set_shader);
        _sg_stats_inc(d3d11.pipeline.num_cs_set_constant_buffers);
    } else {
        // a render pipeline
        SOKOL_ASSERT(pip->d3d11.rs && pip->d3d11.bs && pip->d3d11.dss);
        SOKOL_ASSERT(shd->d3d11.vs);
        SOKOL_ASSERT(shd->d3d11.fs);
        _sg_d3d11_RSSetState(_sg.d3d11.ctx, pip->d3d11.rs);
        _sg_d3d11_OMSetDepthStencilState(_sg.d3d11.ctx, pip->d3d11.dss, pip->d3d11.stencil_ref);
        _sg_d3d11_OMSetBlendState(_sg.d3d11.ctx, pip->d3d11.bs, (float*)&pip->cmn.blend_color, 0xFFFFFFFF);
        _sg_d3d11_IASetPrimitiveTopology(_sg.d3d11.ctx, pip->d3d11.topology);
        _sg_d3d11_IASetInputLayout(_sg.d3d11.ctx, pip->d3d11.il);
        _sg_d3d11_VSSetShader(_sg.d3d11.ctx, shd->d3d11.vs, NULL, 0);
        _sg_d3d11_VSSetConstantBuffers(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_UB_BINDINGS, shd->d3d11.vs_cbufs);
        _sg_d3d11_PSSetShader(_sg.d3d11.ctx, shd->d3d11.fs, NULL, 0);
        _sg_d3d11_PSSetConstantBuffers(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_UB_BINDINGS, shd->d3d11.fs_cbufs);
        _sg_stats_inc(d3d11.pipeline.num_rs_set_state);
        _sg_stats_inc(d3d11.pipeline.num_om_set_depth_stencil_state);
        _sg_stats_inc(d3d11.pipeline.num_om_set_blend_state);
        _sg_stats_inc(d3d11.pipeline.num_ia_set_primitive_topology);
        _sg_stats_inc(d3d11.pipeline.num_ia_set_input_layout);
        _sg_stats_inc(d3d11.pipeline.num_vs_set_shader);
        _sg_stats_inc(d3d11.pipeline.num_vs_set_constant_buffers);
        _sg_stats_inc(d3d11.pipeline.num_ps_set_shader);
        _sg_stats_inc(d3d11.pipeline.num_ps_set_constant_buffers);
    }
}
_SOKOL_PRIVATE bool _sg_d3d11_apply_bindings(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(bnd);
    SOKOL_ASSERT(bnd->pip);
    SOKOL_ASSERT(_sg.d3d11.ctx);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd->pip->cmn.shader);
    const bool is_compute = bnd->pip->cmn.is_compute;
    if (is_compute) {
        _sg_clear(&_sg.d3d11.bnd.cs_srvs, sizeof(_sg.d3d11.bnd.cs_srvs));
        _sg_clear(&_sg.d3d11.bnd.cs_uavs, sizeof(_sg.d3d11.bnd.cs_uavs));
        _sg_clear(&_sg.d3d11.bnd.cs_smps, sizeof(_sg.d3d11.bnd.cs_smps));
    } else {
        _sg_clear(&_sg.d3d11.bnd.vbs, sizeof(_sg.d3d11.bnd.vbs));
        _sg_clear(&_sg.d3d11.bnd.vb_offsets, sizeof(_sg.d3d11.bnd.vb_offsets));
        _sg_clear(&_sg.d3d11.bnd.vs_srvs, sizeof(_sg.d3d11.bnd.vs_srvs));
        _sg_clear(&_sg.d3d11.bnd.fs_srvs, sizeof(_sg.d3d11.bnd.fs_srvs));
        _sg_clear(&_sg.d3d11.bnd.vs_smps, sizeof(_sg.d3d11.bnd.vs_smps));
        _sg_clear(&_sg.d3d11.bnd.fs_smps, sizeof(_sg.d3d11.bnd.fs_smps));
    }
    // gather all the D3D11 resources into arrays
    ID3D11Buffer* d3d11_ib = bnd->ib ? bnd->ib->d3d11.buf : 0;
    if (is_compute) {
        // on D3D11 we need to break a chicken-and-egg situation: a resource
        // may still be bound as a shader-resource-view while it is about to
        // be bound as an unordered-access-view, so first clear all
        // shader-resource-view bindings
        _sg_d3d11_CSSetShaderResources(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_SRV_BINDINGS, _sg.d3d11.bnd.cs_srvs);
    } else {
        for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
            const _sg_buffer_t* vb = bnd->vbs[i];
            if (vb == 0) {
                continue;
            }
            SOKOL_ASSERT(vb->d3d11.buf);
            _sg.d3d11.bnd.vbs[i] = vb->d3d11.buf;
            _sg.d3d11.bnd.vb_offsets[i] = (UINT)bnd->vb_offsets[i];
        }
    }
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const _sg_view_t* view = bnd->views[i];
        if (0 == view) {
            continue;
        }
        const _sg_shader_view_t* shd_view = &shd->cmn.views[i];
        const sg_shader_stage stage = shd_view->stage;
        SOKOL_ASSERT((stage == SG_SHADERSTAGE_VERTEX)
            || (stage == SG_SHADERSTAGE_FRAGMENT)
            || (stage == SG_SHADERSTAGE_COMPUTE));
        SOKOL_ASSERT((shd_view->view_type == SG_VIEWTYPE_TEXTURE)
            || (shd_view->view_type == SG_VIEWTYPE_STORAGEBUFFER)
            || (shd_view->view_type == SG_VIEWTYPE_STORAGEIMAGE));
        if (shd_view->view_type == SG_VIEWTYPE_TEXTURE) {
            const uint8_t d3d11_slot = shd->d3d11.view_register_t_n[i];
            SOKOL_ASSERT(d3d11_slot < _SG_D3D11_MAX_STAGE_SRV_BINDINGS);
            ID3D11ShaderResourceView* d3d11_srv = view->d3d11.srv;
            SOKOL_ASSERT(d3d11_srv);
            switch (stage) {
                case SG_SHADERSTAGE_VERTEX: _sg.d3d11.bnd.vs_srvs[d3d11_slot] = d3d11_srv; break;
                case SG_SHADERSTAGE_FRAGMENT: _sg.d3d11.bnd.fs_srvs[d3d11_slot] = d3d11_srv; break;
                case SG_SHADERSTAGE_COMPUTE: _sg.d3d11.bnd.cs_srvs[d3d11_slot] = d3d11_srv; break;
                default: SOKOL_UNREACHABLE;
            }
        } else if (shd_view->view_type == SG_VIEWTYPE_STORAGEBUFFER) {
            if (shd->cmn.views[i].sbuf_readonly) {
                const uint8_t d3d11_slot = shd->d3d11.view_register_t_n[i];
                SOKOL_ASSERT(d3d11_slot < _SG_D3D11_MAX_STAGE_SRV_BINDINGS);
                ID3D11ShaderResourceView* d3d11_srv = view->d3d11.srv;
                SOKOL_ASSERT(d3d11_srv);
                switch (stage) {
                    case SG_SHADERSTAGE_VERTEX: _sg.d3d11.bnd.vs_srvs[d3d11_slot] = d3d11_srv; break;
                    case SG_SHADERSTAGE_FRAGMENT: _sg.d3d11.bnd.fs_srvs[d3d11_slot] = d3d11_srv; break;
                    case SG_SHADERSTAGE_COMPUTE: _sg.d3d11.bnd.cs_srvs[d3d11_slot] = d3d11_srv; break;
                    default: SOKOL_UNREACHABLE;
                }
            } else {
                SOKOL_ASSERT(stage == SG_SHADERSTAGE_COMPUTE);
                const uint8_t d3d11_slot = shd->d3d11.view_register_u_n[i];
                SOKOL_ASSERT(d3d11_slot < _sg.limits.d3d11_max_unordered_access_views);
                ID3D11UnorderedAccessView* d3d11_uav = view->d3d11.uav;
                SOKOL_ASSERT(d3d11_uav);
                _sg.d3d11.bnd.cs_uavs[d3d11_slot] = d3d11_uav;
            }
        } else if (shd_view->view_type == SG_VIEWTYPE_STORAGEIMAGE) {
            SOKOL_ASSERT(stage == SG_SHADERSTAGE_COMPUTE);
            const uint8_t d3d11_slot = shd->d3d11.view_register_u_n[i];
            SOKOL_ASSERT(d3d11_slot < _sg.limits.d3d11_max_unordered_access_views);
            ID3D11UnorderedAccessView* d3d11_uav = view->d3d11.uav;
            SOKOL_ASSERT(d3d11_uav);
            _sg.d3d11.bnd.cs_uavs[d3d11_slot] = d3d11_uav;
        } else SOKOL_UNREACHABLE;
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        const _sg_sampler_t* smp = bnd->smps[i];
        if (smp == 0) {
            continue;
        }
        const sg_shader_stage stage = shd->cmn.samplers[i].stage;
        SOKOL_ASSERT(stage != SG_SHADERSTAGE_NONE);
        const uint8_t d3d11_slot = shd->d3d11.smp_register_s_n[i];
        SOKOL_ASSERT(d3d11_slot < _SG_D3D11_MAX_STAGE_SMP_BINDINGS);
        SOKOL_ASSERT(smp->d3d11.smp);
        ID3D11SamplerState* d3d11_smp = smp->d3d11.smp;
        switch (stage) {
            case SG_SHADERSTAGE_VERTEX: _sg.d3d11.bnd.vs_smps[d3d11_slot] = d3d11_smp; break;
            case SG_SHADERSTAGE_FRAGMENT: _sg.d3d11.bnd.fs_smps[d3d11_slot] = d3d11_smp; break;
            case SG_SHADERSTAGE_COMPUTE: _sg.d3d11.bnd.cs_smps[d3d11_slot] = d3d11_smp; break;
            default: SOKOL_UNREACHABLE;
        }
    }
    if (is_compute) {
        SOKOL_ASSERT(_sg.limits.d3d11_max_unordered_access_views <= _SG_D3D11_MAX_STAGE_UAV_BINDINGS);
        _sg_d3d11_CSSetUnorderedAccessViews(_sg.d3d11.ctx, 0, _sg.limits.d3d11_max_unordered_access_views, _sg.d3d11.bnd.cs_uavs, NULL);
        _sg_d3d11_CSSetShaderResources(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_SRV_BINDINGS, _sg.d3d11.bnd.cs_srvs);
        _sg_d3d11_CSSetSamplers(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_SMP_BINDINGS, _sg.d3d11.bnd.cs_smps);
        _sg_stats_inc(d3d11.bindings.num_cs_set_shader_resources);
        _sg_stats_inc(d3d11.bindings.num_cs_set_samplers);
        _sg_stats_inc(d3d11.bindings.num_cs_set_unordered_access_views);
    } else {
        _sg_d3d11_IASetVertexBuffers(_sg.d3d11.ctx, 0, SG_MAX_VERTEXBUFFER_BINDSLOTS, _sg.d3d11.bnd.vbs, bnd->pip->d3d11.vb_strides, _sg.d3d11.bnd.vb_offsets);
        _sg_d3d11_IASetIndexBuffer(_sg.d3d11.ctx, d3d11_ib, bnd->pip->d3d11.index_format, (UINT)bnd->ib_offset);
        _sg_d3d11_VSSetShaderResources(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_SRV_BINDINGS, _sg.d3d11.bnd.vs_srvs);
        _sg_d3d11_PSSetShaderResources(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_SRV_BINDINGS, _sg.d3d11.bnd.fs_srvs);
        _sg_d3d11_VSSetSamplers(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_SMP_BINDINGS, _sg.d3d11.bnd.vs_smps);
        _sg_d3d11_PSSetSamplers(_sg.d3d11.ctx, 0, _SG_D3D11_MAX_STAGE_SMP_BINDINGS, _sg.d3d11.bnd.fs_smps);
        _sg_stats_inc(d3d11.bindings.num_ia_set_vertex_buffers);
        _sg_stats_inc(d3d11.bindings.num_ia_set_index_buffer);
        _sg_stats_inc(d3d11.bindings.num_vs_set_shader_resources);
        _sg_stats_inc(d3d11.bindings.num_ps_set_shader_resources);
        _sg_stats_inc(d3d11.bindings.num_vs_set_samplers);
        _sg_stats_inc(d3d11.bindings.num_ps_set_samplers);
    }
    return true;
}
_SOKOL_PRIVATE void _sg_d3d11_apply_uniforms(int ub_slot, const sg_range* data) {
    SOKOL_ASSERT(_sg.d3d11.ctx);
    SOKOL_ASSERT((ub_slot >= 0) && (ub_slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS));
    const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    SOKOL_ASSERT(data->size == shd->cmn.uniform_blocks[ub_slot].size);
    ID3D11Buffer* cbuf = shd->d3d11.all_cbufs[ub_slot];
    SOKOL_ASSERT(cbuf);
    _sg_d3d11_UpdateSubresource(_sg.d3d11.ctx, (ID3D11Resource*)cbuf, 0, NULL, data->ptr, 0, 0);
    _sg_stats_inc(d3d11.uniforms.num_update_subresource);
}
_SOKOL_PRIVATE void _sg_d3d11_draw(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
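    // NOTE: base_vertex is intentionally not cast to UINT below, since
    // BaseVertexLocation is a signed INT in the D3D11 API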
    const bool use_instanced_draw = (num_instances > 1) || (_sg.use_instanced_draw);
    if (_sg.use_indexed_draw) {
        if (use_instanced_draw) {
            _sg_d3d11_DrawIndexedInstanced(_sg.d3d11.ctx,
                (UINT)num_elements,
                (UINT)num_instances,
                (UINT)base_element,
                base_vertex,
                (UINT)base_instance);
            _sg_stats_inc(d3d11.draw.num_draw_indexed_instanced);
        } else {
            _sg_d3d11_DrawIndexed(_sg.d3d11.ctx, (UINT)num_elements, (UINT)base_element, base_vertex);
            _sg_stats_inc(d3d11.draw.num_draw_indexed);
        }
    } else {
        if (use_instanced_draw) {
            _sg_d3d11_DrawInstanced(_sg.d3d11.ctx,
                (UINT)num_elements,
                (UINT)num_instances,
                (UINT)base_element,
                (UINT)base_instance);
            _sg_stats_inc(d3d11.draw.num_draw_instanced);
        } else {
            _sg_d3d11_Draw(_sg.d3d11.ctx, (UINT)num_elements, (UINT)base_element);
            _sg_stats_inc(d3d11.draw.num_draw);
        }
    }
}
_SOKOL_PRIVATE void _sg_d3d11_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    _sg_d3d11_Dispatch(_sg.d3d11.ctx, (UINT)num_groups_x, (UINT)num_groups_y, (UINT)num_groups_z);
}
_SOKOL_PRIVATE void _sg_d3d11_commit(void) {
    // empty
}
_SOKOL_PRIVATE void _sg_d3d11_update_buffer(_sg_buffer_t* buf, const sg_range* data) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    SOKOL_ASSERT(_sg.d3d11.ctx);
    SOKOL_ASSERT(buf->d3d11.buf);
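    // NOTE: D3D11_MAP_WRITE_DISCARD orphans the previous buffer content, so
    // the GPU can keep reading the old data while the CPU writes the new data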
    D3D11_MAPPED_SUBRESOURCE d3d11_msr;
    HRESULT hr = _sg_d3d11_Map(_sg.d3d11.ctx, (ID3D11Resource*)buf->d3d11.buf, 0, D3D11_MAP_WRITE_DISCARD, 0, &d3d11_msr);
    _sg_stats_inc(d3d11.num_map);
    if (SUCCEEDED(hr)) {
        memcpy(d3d11_msr.pData, data->ptr, data->size);
        _sg_d3d11_Unmap(_sg.d3d11.ctx, (ID3D11Resource*)buf->d3d11.buf, 0);
        _sg_stats_inc(d3d11.num_unmap);
    } else {
        _SG_ERROR(D3D11_MAP_FOR_UPDATE_BUFFER_FAILED);
    }
}
_SOKOL_PRIVATE void _sg_d3d11_append_buffer(_sg_buffer_t* buf, const sg_range* data, bool new_frame) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    SOKOL_ASSERT(_sg.d3d11.ctx);
    SOKOL_ASSERT(buf->d3d11.buf);
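    // NOTE: the first append in a frame maps with D3D11_MAP_WRITE_DISCARD
    // (orphaning the buffer), all following appends use
    // D3D11_MAP_WRITE_NO_OVERWRITE, which promises not to touch any data
    // the GPU may still be reading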
    D3D11_MAP map_type = new_frame ? D3D11_MAP_WRITE_DISCARD : D3D11_MAP_WRITE_NO_OVERWRITE;
    D3D11_MAPPED_SUBRESOURCE d3d11_msr;
    HRESULT hr = _sg_d3d11_Map(_sg.d3d11.ctx, (ID3D11Resource*)buf->d3d11.buf, 0, map_type, 0, &d3d11_msr);
    _sg_stats_inc(d3d11.num_map);
    if (SUCCEEDED(hr)) {
        uint8_t* dst_ptr = (uint8_t*)d3d11_msr.pData + buf->cmn.append_pos;
        memcpy(dst_ptr, data->ptr, data->size);
        _sg_d3d11_Unmap(_sg.d3d11.ctx, (ID3D11Resource*)buf->d3d11.buf, 0);
        _sg_stats_inc(d3d11.num_unmap);
    } else {
        _SG_ERROR(D3D11_MAP_FOR_APPEND_BUFFER_FAILED);
    }
}
// see: https://learn.microsoft.com/en-us/windows/win32/direct3d11/overviews-direct3d-11-resources-subresources
// also see: https://learn.microsoft.com/en-us/windows/win32/api/d3d11/nf-d3d11-d3d11calcsubresource
_SOKOL_PRIVATE void _sg_d3d11_update_image(_sg_image_t* img, const sg_image_data* data) {
    SOKOL_ASSERT(img && data);
    SOKOL_ASSERT(_sg.d3d11.ctx);
    SOKOL_ASSERT(img->d3d11.res);
    const int num_slices = (img->cmn.type == SG_IMAGETYPE_3D) ? 1 : img->cmn.num_slices;
    const int num_depth_slices = (img->cmn.type == SG_IMAGETYPE_3D) ? img->cmn.num_slices : 1;
    UINT subres_index = 0;
    HRESULT hr;
    D3D11_MAPPED_SUBRESOURCE d3d11_msr;
    for (int slice_index = 0; slice_index < num_slices; slice_index++) {
        for (int mip_index = 0; mip_index < img->cmn.num_mipmaps; mip_index++, subres_index++) {
            SOKOL_ASSERT(subres_index < _SG_D3D11_MAX_TEXTURE_SUBRESOURCES);
            const int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
            const int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
            const int src_row_pitch = _sg_row_pitch(img->cmn.pixel_format, mip_width, 1);
            const int src_depth_pitch = _sg_surface_pitch(img->cmn.pixel_format, mip_width, mip_height, 1);
            const sg_range* miplevel_data = &(data->mip_levels[mip_index]);
            const size_t slice_size = miplevel_data->size / (size_t)num_slices;
            SOKOL_ASSERT(slice_size == (size_t)(src_depth_pitch * num_depth_slices));
            const size_t slice_offset = slice_size * (size_t)slice_index;
            const uint8_t* slice_ptr = ((const uint8_t*)miplevel_data->ptr) + slice_offset;
            hr = _sg_d3d11_Map(_sg.d3d11.ctx, img->d3d11.res, subres_index, D3D11_MAP_WRITE_DISCARD, 0, &d3d11_msr);
            _sg_stats_inc(d3d11.num_map);
            if (SUCCEEDED(hr)) {
                const uint8_t* src_ptr = slice_ptr;
                uint8_t* dst_ptr = (uint8_t*)d3d11_msr.pData;
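                // if the source row pitch matches the mapped row pitch, each
                // depth slice can be copied with a single memcpy, otherwise
                // the copy must happen row by row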
                for (int depth_index = 0; depth_index < num_depth_slices; depth_index++) {
                    if (src_row_pitch == (int)d3d11_msr.RowPitch) {
                        const size_t copy_size = slice_size / (size_t)num_depth_slices;
                        SOKOL_ASSERT((copy_size * (size_t)num_depth_slices) == slice_size);
                        memcpy(dst_ptr, src_ptr, copy_size);
                    } else {
                        SOKOL_ASSERT(src_row_pitch < (int)d3d11_msr.RowPitch);
                        const uint8_t* src_row_ptr = src_ptr;
                        uint8_t* dst_row_ptr = dst_ptr;
                        for (int row_index = 0; row_index < mip_height; row_index++) {
                            memcpy(dst_row_ptr, src_row_ptr, (size_t)src_row_pitch);
                            src_row_ptr += src_row_pitch;
                            dst_row_ptr += d3d11_msr.RowPitch;
                        }
                    }
                    src_ptr += src_depth_pitch;
                    dst_ptr += d3d11_msr.DepthPitch;
                }
                _sg_d3d11_Unmap(_sg.d3d11.ctx, img->d3d11.res, subres_index);
                _sg_stats_inc(d3d11.num_unmap);
            } else {
                _SG_ERROR(D3D11_MAP_FOR_UPDATE_IMAGE_FAILED);
            }
        }
    }
}
// ███ ███ ███████ ████████ █████ ██ ██████ █████ ██████ ██ ██ ███████ ███ ██ ██████
// ████ ████ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██
// ██ ████ ██ █████ ██ ███████ ██ ██████ ███████ ██ █████ █████ ██ ██ ██ ██ ██
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ██ ██ ███████ ██ ██ ██ ███████ ██████ ██ ██ ██████ ██ ██ ███████ ██ ████ ██████
//
// >>metal backend
#elif defined(SOKOL_METAL)
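// NOTE: under ARC the compiler inserts retain/release calls automatically,
// so an explicit retain is a no-op, and releasing only requires setting
// the reference to nil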
#if __has_feature(objc_arc)
#define _SG_OBJC_RETAIN(obj) { }
#define _SG_OBJC_RELEASE(obj) { obj = nil; }
#else
#define _SG_OBJC_RETAIN(obj) { [obj retain]; }
#define _SG_OBJC_RELEASE(obj) { [obj release]; obj = nil; }
#endif
//-- enum translation functions ------------------------------------------------
_SOKOL_PRIVATE MTLLoadAction _sg_mtl_load_action(sg_load_action a) {
    switch (a) {
        case SG_LOADACTION_CLEAR: return MTLLoadActionClear;
        case SG_LOADACTION_LOAD: return MTLLoadActionLoad;
        case SG_LOADACTION_DONTCARE: return MTLLoadActionDontCare;
        default: SOKOL_UNREACHABLE; return (MTLLoadAction)0;
    }
}
_SOKOL_PRIVATE MTLStoreAction _sg_mtl_store_action(sg_store_action a, bool resolve) {
    switch (a) {
        case SG_STOREACTION_STORE:
            if (resolve) {
                return MTLStoreActionStoreAndMultisampleResolve;
            } else {
                return MTLStoreActionStore;
            }
            break;
        case SG_STOREACTION_DONTCARE:
            if (resolve) {
                return MTLStoreActionMultisampleResolve;
            } else {
                return MTLStoreActionDontCare;
            }
            break;
        default: SOKOL_UNREACHABLE; return (MTLStoreAction)0;
    }
}
_SOKOL_PRIVATE MTLResourceOptions _sg_mtl_resource_options_storage_mode_managed_or_shared(void) {
    #if defined(_SG_TARGET_MACOS)
    if (_sg.mtl.use_shared_storage_mode) {
        return MTLResourceStorageModeShared;
    } else {
        return MTLResourceStorageModeManaged;
    }
    #else
    // MTLResourceStorageModeManaged is not even defined in the iOS SDK
    return MTLResourceStorageModeShared;
    #endif
}
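// NOTE: MTLResourceCPUCacheModeWriteCombined fits CPU-updatable buffers
// because sokol-gfx only ever writes to buffer memory from the CPU side
// and never reads it back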
_SOKOL_PRIVATE MTLResourceOptions _sg_mtl_buffer_resource_options(const sg_buffer_usage* usage) {
    if (usage->immutable) {
        return _sg_mtl_resource_options_storage_mode_managed_or_shared();
    } else {
        return MTLResourceCPUCacheModeWriteCombined | _sg_mtl_resource_options_storage_mode_managed_or_shared();
    }
}
_SOKOL_PRIVATE MTLVertexStepFunction _sg_mtl_step_function(sg_vertex_step step) {
    switch (step) {
        case SG_VERTEXSTEP_PER_VERTEX: return MTLVertexStepFunctionPerVertex;
        case SG_VERTEXSTEP_PER_INSTANCE: return MTLVertexStepFunctionPerInstance;
        default: SOKOL_UNREACHABLE; return (MTLVertexStepFunction)0;
    }
}
_SOKOL_PRIVATE MTLVertexFormat _sg_mtl_vertex_format(sg_vertex_format fmt) {
    switch (fmt) {
        case SG_VERTEXFORMAT_FLOAT: return MTLVertexFormatFloat;
        case SG_VERTEXFORMAT_FLOAT2: return MTLVertexFormatFloat2;
        case SG_VERTEXFORMAT_FLOAT3: return MTLVertexFormatFloat3;
        case SG_VERTEXFORMAT_FLOAT4: return MTLVertexFormatFloat4;
        case SG_VERTEXFORMAT_INT: return MTLVertexFormatInt;
        case SG_VERTEXFORMAT_INT2: return MTLVertexFormatInt2;
        case SG_VERTEXFORMAT_INT3: return MTLVertexFormatInt3;
        case SG_VERTEXFORMAT_INT4: return MTLVertexFormatInt4;
        case SG_VERTEXFORMAT_UINT: return MTLVertexFormatUInt;
        case SG_VERTEXFORMAT_UINT2: return MTLVertexFormatUInt2;
        case SG_VERTEXFORMAT_UINT3: return MTLVertexFormatUInt3;
        case SG_VERTEXFORMAT_UINT4: return MTLVertexFormatUInt4;
        case SG_VERTEXFORMAT_BYTE4: return MTLVertexFormatChar4;
        case SG_VERTEXFORMAT_BYTE4N: return MTLVertexFormatChar4Normalized;
        case SG_VERTEXFORMAT_UBYTE4: return MTLVertexFormatUChar4;
        case SG_VERTEXFORMAT_UBYTE4N: return MTLVertexFormatUChar4Normalized;
        case SG_VERTEXFORMAT_SHORT2: return MTLVertexFormatShort2;
        case SG_VERTEXFORMAT_SHORT2N: return MTLVertexFormatShort2Normalized;
        case SG_VERTEXFORMAT_USHORT2: return MTLVertexFormatUShort2;
        case SG_VERTEXFORMAT_USHORT2N: return MTLVertexFormatUShort2Normalized;
        case SG_VERTEXFORMAT_SHORT4: return MTLVertexFormatShort4;
        case SG_VERTEXFORMAT_SHORT4N: return MTLVertexFormatShort4Normalized;
        case SG_VERTEXFORMAT_USHORT4: return MTLVertexFormatUShort4;
        case SG_VERTEXFORMAT_USHORT4N: return MTLVertexFormatUShort4Normalized;
        case SG_VERTEXFORMAT_UINT10_N2: return MTLVertexFormatUInt1010102Normalized;
        case SG_VERTEXFORMAT_HALF2: return MTLVertexFormatHalf2;
        case SG_VERTEXFORMAT_HALF4: return MTLVertexFormatHalf4;
        default: SOKOL_UNREACHABLE; return (MTLVertexFormat)0;
    }
}
_SOKOL_PRIVATE MTLPrimitiveType _sg_mtl_primitive_type(sg_primitive_type t) {
    switch (t) {
        case SG_PRIMITIVETYPE_POINTS: return MTLPrimitiveTypePoint;
        case SG_PRIMITIVETYPE_LINES: return MTLPrimitiveTypeLine;
        case SG_PRIMITIVETYPE_LINE_STRIP: return MTLPrimitiveTypeLineStrip;
        case SG_PRIMITIVETYPE_TRIANGLES: return MTLPrimitiveTypeTriangle;
        case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return MTLPrimitiveTypeTriangleStrip;
        default: SOKOL_UNREACHABLE; return (MTLPrimitiveType)0;
    }
}
_SOKOL_PRIVATE MTLPixelFormat _sg_mtl_pixel_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_R8: return MTLPixelFormatR8Unorm;
        case SG_PIXELFORMAT_R8SN: return MTLPixelFormatR8Snorm;
        case SG_PIXELFORMAT_R8UI: return MTLPixelFormatR8Uint;
        case SG_PIXELFORMAT_R8SI: return MTLPixelFormatR8Sint;
        case SG_PIXELFORMAT_R16: return MTLPixelFormatR16Unorm;
        case SG_PIXELFORMAT_R16SN: return MTLPixelFormatR16Snorm;
        case SG_PIXELFORMAT_R16UI: return MTLPixelFormatR16Uint;
        case SG_PIXELFORMAT_R16SI: return MTLPixelFormatR16Sint;
        case SG_PIXELFORMAT_R16F: return MTLPixelFormatR16Float;
        case SG_PIXELFORMAT_RG8: return MTLPixelFormatRG8Unorm;
        case SG_PIXELFORMAT_RG8SN: return MTLPixelFormatRG8Snorm;
        case SG_PIXELFORMAT_RG8UI: return MTLPixelFormatRG8Uint;
        case SG_PIXELFORMAT_RG8SI: return MTLPixelFormatRG8Sint;
        case SG_PIXELFORMAT_R32UI: return MTLPixelFormatR32Uint;
        case SG_PIXELFORMAT_R32SI: return MTLPixelFormatR32Sint;
        case SG_PIXELFORMAT_R32F: return MTLPixelFormatR32Float;
        case SG_PIXELFORMAT_RG16: return MTLPixelFormatRG16Unorm;
        case SG_PIXELFORMAT_RG16SN: return MTLPixelFormatRG16Snorm;
        case SG_PIXELFORMAT_RG16UI: return MTLPixelFormatRG16Uint;
        case SG_PIXELFORMAT_RG16SI: return MTLPixelFormatRG16Sint;
        case SG_PIXELFORMAT_RG16F: return MTLPixelFormatRG16Float;
        case SG_PIXELFORMAT_RGBA8: return MTLPixelFormatRGBA8Unorm;
        case SG_PIXELFORMAT_SRGB8A8: return MTLPixelFormatRGBA8Unorm_sRGB;
        case SG_PIXELFORMAT_RGBA8SN: return MTLPixelFormatRGBA8Snorm;
        case SG_PIXELFORMAT_RGBA8UI: return MTLPixelFormatRGBA8Uint;
        case SG_PIXELFORMAT_RGBA8SI: return MTLPixelFormatRGBA8Sint;
        case SG_PIXELFORMAT_BGRA8: return MTLPixelFormatBGRA8Unorm;
        case SG_PIXELFORMAT_RGB10A2: return MTLPixelFormatRGB10A2Unorm;
        case SG_PIXELFORMAT_RG11B10F: return MTLPixelFormatRG11B10Float;
        case SG_PIXELFORMAT_RGB9E5: return MTLPixelFormatRGB9E5Float;
        case SG_PIXELFORMAT_RG32UI: return MTLPixelFormatRG32Uint;
        case SG_PIXELFORMAT_RG32SI: return MTLPixelFormatRG32Sint;
        case SG_PIXELFORMAT_RG32F: return MTLPixelFormatRG32Float;
        case SG_PIXELFORMAT_RGBA16: return MTLPixelFormatRGBA16Unorm;
        case SG_PIXELFORMAT_RGBA16SN: return MTLPixelFormatRGBA16Snorm;
        case SG_PIXELFORMAT_RGBA16UI: return MTLPixelFormatRGBA16Uint;
        case SG_PIXELFORMAT_RGBA16SI: return MTLPixelFormatRGBA16Sint;
        case SG_PIXELFORMAT_RGBA16F: return MTLPixelFormatRGBA16Float;
        case SG_PIXELFORMAT_RGBA32UI: return MTLPixelFormatRGBA32Uint;
        case SG_PIXELFORMAT_RGBA32SI: return MTLPixelFormatRGBA32Sint;
        case SG_PIXELFORMAT_RGBA32F: return MTLPixelFormatRGBA32Float;
        case SG_PIXELFORMAT_DEPTH: return MTLPixelFormatDepth32Float;
        case SG_PIXELFORMAT_DEPTH_STENCIL: return MTLPixelFormatDepth32Float_Stencil8;
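        // NOTE: the BC compressed formats are only exposed on macOS here,
        // while the ETC2/EAC/ASTC formats are only exposed on iOS targets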
  13145. #if defined(_SG_TARGET_MACOS)
  13146. case SG_PIXELFORMAT_BC1_RGBA: return MTLPixelFormatBC1_RGBA;
  13147. case SG_PIXELFORMAT_BC2_RGBA: return MTLPixelFormatBC2_RGBA;
  13148. case SG_PIXELFORMAT_BC3_RGBA: return MTLPixelFormatBC3_RGBA;
  13149. case SG_PIXELFORMAT_BC3_SRGBA: return MTLPixelFormatBC3_RGBA_sRGB;
  13150. case SG_PIXELFORMAT_BC4_R: return MTLPixelFormatBC4_RUnorm;
  13151. case SG_PIXELFORMAT_BC4_RSN: return MTLPixelFormatBC4_RSnorm;
  13152. case SG_PIXELFORMAT_BC5_RG: return MTLPixelFormatBC5_RGUnorm;
  13153. case SG_PIXELFORMAT_BC5_RGSN: return MTLPixelFormatBC5_RGSnorm;
  13154. case SG_PIXELFORMAT_BC6H_RGBF: return MTLPixelFormatBC6H_RGBFloat;
  13155. case SG_PIXELFORMAT_BC6H_RGBUF: return MTLPixelFormatBC6H_RGBUfloat;
  13156. case SG_PIXELFORMAT_BC7_RGBA: return MTLPixelFormatBC7_RGBAUnorm;
  13157. case SG_PIXELFORMAT_BC7_SRGBA: return MTLPixelFormatBC7_RGBAUnorm_sRGB;
  13158. #else
  13159. case SG_PIXELFORMAT_ETC2_RGB8: return MTLPixelFormatETC2_RGB8;
  13160. case SG_PIXELFORMAT_ETC2_SRGB8: return MTLPixelFormatETC2_RGB8_sRGB;
  13161. case SG_PIXELFORMAT_ETC2_RGB8A1: return MTLPixelFormatETC2_RGB8A1;
  13162. case SG_PIXELFORMAT_ETC2_RGBA8: return MTLPixelFormatEAC_RGBA8;
  13163. case SG_PIXELFORMAT_ETC2_SRGB8A8: return MTLPixelFormatEAC_RGBA8_sRGB;
  13164. case SG_PIXELFORMAT_EAC_R11: return MTLPixelFormatEAC_R11Unorm;
  13165. case SG_PIXELFORMAT_EAC_R11SN: return MTLPixelFormatEAC_R11Snorm;
  13166. case SG_PIXELFORMAT_EAC_RG11: return MTLPixelFormatEAC_RG11Unorm;
  13167. case SG_PIXELFORMAT_EAC_RG11SN: return MTLPixelFormatEAC_RG11Snorm;
  13168. case SG_PIXELFORMAT_ASTC_4x4_RGBA: return MTLPixelFormatASTC_4x4_LDR;
  13169. case SG_PIXELFORMAT_ASTC_4x4_SRGBA: return MTLPixelFormatASTC_4x4_sRGB;
  13170. #endif
  13171. default: return MTLPixelFormatInvalid;
  13172. }
  13173. }
  13174. _SOKOL_PRIVATE MTLColorWriteMask _sg_mtl_color_write_mask(sg_color_mask m) {
  13175. MTLColorWriteMask mtl_mask = MTLColorWriteMaskNone;
  13176. if (m & SG_COLORMASK_R) {
  13177. mtl_mask |= MTLColorWriteMaskRed;
  13178. }
  13179. if (m & SG_COLORMASK_G) {
  13180. mtl_mask |= MTLColorWriteMaskGreen;
  13181. }
  13182. if (m & SG_COLORMASK_B) {
  13183. mtl_mask |= MTLColorWriteMaskBlue;
  13184. }
  13185. if (m & SG_COLORMASK_A) {
  13186. mtl_mask |= MTLColorWriteMaskAlpha;
  13187. }
  13188. return mtl_mask;
  13189. }
  13190. _SOKOL_PRIVATE MTLBlendOperation _sg_mtl_blend_op(sg_blend_op op) {
  13191. switch (op) {
  13192. case SG_BLENDOP_ADD: return MTLBlendOperationAdd;
  13193. case SG_BLENDOP_SUBTRACT: return MTLBlendOperationSubtract;
  13194. case SG_BLENDOP_REVERSE_SUBTRACT: return MTLBlendOperationReverseSubtract;
  13195. case SG_BLENDOP_MIN: return MTLBlendOperationMin;
  13196. case SG_BLENDOP_MAX: return MTLBlendOperationMax;
  13197. default: SOKOL_UNREACHABLE; return (MTLBlendOperation)0;
  13198. }
  13199. }
  13200. _SOKOL_PRIVATE MTLBlendFactor _sg_mtl_blend_factor(sg_blend_factor f) {
  13201. switch (f) {
  13202. case SG_BLENDFACTOR_ZERO: return MTLBlendFactorZero;
  13203. case SG_BLENDFACTOR_ONE: return MTLBlendFactorOne;
  13204. case SG_BLENDFACTOR_SRC_COLOR: return MTLBlendFactorSourceColor;
  13205. case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return MTLBlendFactorOneMinusSourceColor;
  13206. case SG_BLENDFACTOR_SRC_ALPHA: return MTLBlendFactorSourceAlpha;
  13207. case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return MTLBlendFactorOneMinusSourceAlpha;
  13208. case SG_BLENDFACTOR_DST_COLOR: return MTLBlendFactorDestinationColor;
  13209. case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return MTLBlendFactorOneMinusDestinationColor;
  13210. case SG_BLENDFACTOR_DST_ALPHA: return MTLBlendFactorDestinationAlpha;
  13211. case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return MTLBlendFactorOneMinusDestinationAlpha;
  13212. case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return MTLBlendFactorSourceAlphaSaturated;
  13213. case SG_BLENDFACTOR_BLEND_COLOR: return MTLBlendFactorBlendColor;
  13214. case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return MTLBlendFactorOneMinusBlendColor;
  13215. case SG_BLENDFACTOR_BLEND_ALPHA: return MTLBlendFactorBlendAlpha;
  13216. case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return MTLBlendFactorOneMinusBlendAlpha;
  13217. default: SOKOL_UNREACHABLE; return (MTLBlendFactor)0;
  13218. }
  13219. }
  13220. _SOKOL_PRIVATE MTLCompareFunction _sg_mtl_compare_func(sg_compare_func f) {
  13221. switch (f) {
  13222. case SG_COMPAREFUNC_NEVER: return MTLCompareFunctionNever;
  13223. case SG_COMPAREFUNC_LESS: return MTLCompareFunctionLess;
  13224. case SG_COMPAREFUNC_EQUAL: return MTLCompareFunctionEqual;
  13225. case SG_COMPAREFUNC_LESS_EQUAL: return MTLCompareFunctionLessEqual;
  13226. case SG_COMPAREFUNC_GREATER: return MTLCompareFunctionGreater;
  13227. case SG_COMPAREFUNC_NOT_EQUAL: return MTLCompareFunctionNotEqual;
  13228. case SG_COMPAREFUNC_GREATER_EQUAL: return MTLCompareFunctionGreaterEqual;
  13229. case SG_COMPAREFUNC_ALWAYS: return MTLCompareFunctionAlways;
  13230. default: SOKOL_UNREACHABLE; return (MTLCompareFunction)0;
  13231. }
  13232. }
  13233. _SOKOL_PRIVATE MTLStencilOperation _sg_mtl_stencil_op(sg_stencil_op op) {
  13234. switch (op) {
  13235. case SG_STENCILOP_KEEP: return MTLStencilOperationKeep;
  13236. case SG_STENCILOP_ZERO: return MTLStencilOperationZero;
  13237. case SG_STENCILOP_REPLACE: return MTLStencilOperationReplace;
  13238. case SG_STENCILOP_INCR_CLAMP: return MTLStencilOperationIncrementClamp;
  13239. case SG_STENCILOP_DECR_CLAMP: return MTLStencilOperationDecrementClamp;
  13240. case SG_STENCILOP_INVERT: return MTLStencilOperationInvert;
  13241. case SG_STENCILOP_INCR_WRAP: return MTLStencilOperationIncrementWrap;
  13242. case SG_STENCILOP_DECR_WRAP: return MTLStencilOperationDecrementWrap;
  13243. default: SOKOL_UNREACHABLE; return (MTLStencilOperation)0;
  13244. }
  13245. }
  13246. _SOKOL_PRIVATE MTLCullMode _sg_mtl_cull_mode(sg_cull_mode m) {
  13247. switch (m) {
  13248. case SG_CULLMODE_NONE: return MTLCullModeNone;
  13249. case SG_CULLMODE_FRONT: return MTLCullModeFront;
  13250. case SG_CULLMODE_BACK: return MTLCullModeBack;
  13251. default: SOKOL_UNREACHABLE; return (MTLCullMode)0;
  13252. }
  13253. }
  13254. _SOKOL_PRIVATE MTLWinding _sg_mtl_winding(sg_face_winding w) {
  13255. switch (w) {
  13256. case SG_FACEWINDING_CW: return MTLWindingClockwise;
  13257. case SG_FACEWINDING_CCW: return MTLWindingCounterClockwise;
  13258. default: SOKOL_UNREACHABLE; return (MTLWinding)0;
  13259. }
  13260. }
  13261. _SOKOL_PRIVATE MTLIndexType _sg_mtl_index_type(sg_index_type t) {
  13262. switch (t) {
  13263. case SG_INDEXTYPE_UINT16: return MTLIndexTypeUInt16;
  13264. case SG_INDEXTYPE_UINT32: return MTLIndexTypeUInt32;
  13265. default: SOKOL_UNREACHABLE; return (MTLIndexType)0;
  13266. }
  13267. }
  13268. _SOKOL_PRIVATE int _sg_mtl_index_size(sg_index_type t) {
  13269. switch (t) {
  13270. case SG_INDEXTYPE_NONE: return 0;
  13271. case SG_INDEXTYPE_UINT16: return 2;
  13272. case SG_INDEXTYPE_UINT32: return 4;
  13273. default: SOKOL_UNREACHABLE; return 0;
  13274. }
  13275. }
  13276. _SOKOL_PRIVATE MTLTextureType _sg_mtl_texture_type(sg_image_type t, bool msaa) {
  13277. switch (t) {
  13278. case SG_IMAGETYPE_2D: return msaa ? MTLTextureType2DMultisample : MTLTextureType2D;
  13279. case SG_IMAGETYPE_CUBE: return MTLTextureTypeCube;
  13280. case SG_IMAGETYPE_3D: return MTLTextureType3D;
  13281. // NOTE: MTLTextureType2DMultisampleArray requires macOS 10.14+, iOS 14.0+
  13282. case SG_IMAGETYPE_ARRAY: return MTLTextureType2DArray;
  13283. default: SOKOL_UNREACHABLE; return (MTLTextureType)0;
  13284. }
  13285. }
  13286. _SOKOL_PRIVATE MTLSamplerAddressMode _sg_mtl_address_mode(sg_wrap w) {
  13287. if (_sg.features.image_clamp_to_border) {
  13288. if (@available(macOS 12.0, iOS 14.0, *)) {
  13289. // border color feature available
  13290. switch (w) {
  13291. case SG_WRAP_REPEAT: return MTLSamplerAddressModeRepeat;
  13292. case SG_WRAP_CLAMP_TO_EDGE: return MTLSamplerAddressModeClampToEdge;
  13293. case SG_WRAP_CLAMP_TO_BORDER: return MTLSamplerAddressModeClampToBorderColor;
  13294. case SG_WRAP_MIRRORED_REPEAT: return MTLSamplerAddressModeMirrorRepeat;
  13295. default: SOKOL_UNREACHABLE; return (MTLSamplerAddressMode)0;
  13296. }
  13297. }
  13298. }
    // fallthrough: clamp-to-border not supported
    switch (w) {
        case SG_WRAP_REPEAT: return MTLSamplerAddressModeRepeat;
        case SG_WRAP_CLAMP_TO_EDGE: return MTLSamplerAddressModeClampToEdge;
        case SG_WRAP_CLAMP_TO_BORDER: return MTLSamplerAddressModeClampToEdge;
        case SG_WRAP_MIRRORED_REPEAT: return MTLSamplerAddressModeMirrorRepeat;
        default: SOKOL_UNREACHABLE; return (MTLSamplerAddressMode)0;
    }
}
_SOKOL_PRIVATE API_AVAILABLE(ios(14.0), macos(12.0)) MTLSamplerBorderColor _sg_mtl_border_color(sg_border_color c) {
    switch (c) {
        case SG_BORDERCOLOR_TRANSPARENT_BLACK: return MTLSamplerBorderColorTransparentBlack;
        case SG_BORDERCOLOR_OPAQUE_BLACK: return MTLSamplerBorderColorOpaqueBlack;
        case SG_BORDERCOLOR_OPAQUE_WHITE: return MTLSamplerBorderColorOpaqueWhite;
        default: SOKOL_UNREACHABLE; return (MTLSamplerBorderColor)0;
    }
}
_SOKOL_PRIVATE MTLSamplerMinMagFilter _sg_mtl_minmag_filter(sg_filter f) {
    switch (f) {
        case SG_FILTER_NEAREST:
            return MTLSamplerMinMagFilterNearest;
        case SG_FILTER_LINEAR:
            return MTLSamplerMinMagFilterLinear;
        default:
            SOKOL_UNREACHABLE; return (MTLSamplerMinMagFilter)0;
    }
}
_SOKOL_PRIVATE MTLSamplerMipFilter _sg_mtl_mipmap_filter(sg_filter f) {
    switch (f) {
        case SG_FILTER_NEAREST:
            return MTLSamplerMipFilterNearest;
        case SG_FILTER_LINEAR:
            return MTLSamplerMipFilterLinear;
        default:
            SOKOL_UNREACHABLE; return (MTLSamplerMipFilter)0;
    }
}
_SOKOL_PRIVATE size_t _sg_mtl_vertexbuffer_bindslot(size_t sokol_bindslot) {
    return sokol_bindslot + _SG_MTL_MAX_STAGE_UB_SBUF_BINDINGS;
}
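// NOTE (illustrative summary, inferred from the range checks in
// _sg_mtl_ensure_msl_bindslot_ranges() further below): each Metal shader stage
// sees three consecutive buffer bind slot ranges:
//
//      [0 .. _SG_MTL_MAX_STAGE_UB_BINDINGS)                                    => uniform blocks
//      [_SG_MTL_MAX_STAGE_UB_BINDINGS .. _SG_MTL_MAX_STAGE_UB_SBUF_BINDINGS)   => storage buffers
//      [_SG_MTL_MAX_STAGE_UB_SBUF_BINDINGS .. _SG_MTL_MAX_STAGE_BUFFER_BINDINGS) => vertex buffers
//
// ...so _sg_mtl_vertexbuffer_bindslot(0) maps sokol-gfx vertex buffer bind slot 0
// to the first Metal buffer slot past the uniform-block and storage-buffer ranges.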
//-- a pool for all Metal resource objects, with deferred release queue ---------
_SOKOL_PRIVATE void _sg_mtl_init_pool(const sg_desc* desc) {
    _sg.mtl.idpool.num_slots = 2 *
        (
            2 * desc->buffer_pool_size +
            2 * desc->image_pool_size +
            1 * desc->sampler_pool_size +
            6 * desc->shader_pool_size +
            3 * desc->pipeline_pool_size +
            1 * desc->view_pool_size +
            128
        );
    _sg.mtl.idpool.pool = [NSMutableArray arrayWithCapacity:(NSUInteger)_sg.mtl.idpool.num_slots];
    _SG_OBJC_RETAIN(_sg.mtl.idpool.pool);
    NSNull* null = [NSNull null];
    for (int i = 0; i < _sg.mtl.idpool.num_slots; i++) {
        [_sg.mtl.idpool.pool addObject:null];
    }
    SOKOL_ASSERT([_sg.mtl.idpool.pool count] == (NSUInteger)_sg.mtl.idpool.num_slots);
    // a queue of currently free slot indices
    _sg.mtl.idpool.free_queue_top = 0;
    _sg.mtl.idpool.free_queue = (int*)_sg_malloc_clear((size_t)_sg.mtl.idpool.num_slots * sizeof(int));
    // pool slot 0 is reserved!
    for (int i = _sg.mtl.idpool.num_slots-1; i >= 1; i--) {
        _sg.mtl.idpool.free_queue[_sg.mtl.idpool.free_queue_top++] = i;
    }
    // a circular queue which holds release items (the frame index when a resource is to be released, and the resource's pool index)
    _sg.mtl.idpool.release_queue_front = 0;
    _sg.mtl.idpool.release_queue_back = 0;
    _sg.mtl.idpool.release_queue = (_sg_mtl_release_item_t*)_sg_malloc_clear((size_t)_sg.mtl.idpool.num_slots * sizeof(_sg_mtl_release_item_t));
    for (int i = 0; i < _sg.mtl.idpool.num_slots; i++) {
        _sg.mtl.idpool.release_queue[i].frame_index = 0;
        _sg.mtl.idpool.release_queue[i].slot_index = _SG_MTL_INVALID_SLOT_INDEX;
    }
}
_SOKOL_PRIVATE void _sg_mtl_destroy_pool(void) {
    _sg_free(_sg.mtl.idpool.release_queue); _sg.mtl.idpool.release_queue = 0;
    _sg_free(_sg.mtl.idpool.free_queue); _sg.mtl.idpool.free_queue = 0;
    _SG_OBJC_RELEASE(_sg.mtl.idpool.pool);
}
// get a new free resource pool slot
_SOKOL_PRIVATE int _sg_mtl_alloc_pool_slot(void) {
    SOKOL_ASSERT(_sg.mtl.idpool.free_queue_top > 0);
    const int slot_index = _sg.mtl.idpool.free_queue[--_sg.mtl.idpool.free_queue_top];
    SOKOL_ASSERT((slot_index > 0) && (slot_index < _sg.mtl.idpool.num_slots));
    return slot_index;
}
// put a free resource pool slot back into the free-queue
_SOKOL_PRIVATE void _sg_mtl_free_pool_slot(int slot_index) {
    SOKOL_ASSERT(_sg.mtl.idpool.free_queue_top < _sg.mtl.idpool.num_slots);
    SOKOL_ASSERT((slot_index > 0) && (slot_index < _sg.mtl.idpool.num_slots));
    _sg.mtl.idpool.free_queue[_sg.mtl.idpool.free_queue_top++] = slot_index;
}
// add an MTLResource to the pool, returns the pool index, or _SG_MTL_INVALID_SLOT_INDEX if the input was 'nil'
_SOKOL_PRIVATE int _sg_mtl_add_resource(id res) {
    if (nil == res) {
        return _SG_MTL_INVALID_SLOT_INDEX;
    }
    _sg_stats_inc(metal.idpool.num_added);
    const int slot_index = _sg_mtl_alloc_pool_slot();
    // NOTE: the NSMutableArray will take ownership of its items
    SOKOL_ASSERT([NSNull null] == _sg.mtl.idpool.pool[(NSUInteger)slot_index]);
    _sg.mtl.idpool.pool[(NSUInteger)slot_index] = res;
    return slot_index;
}
/* mark an MTLResource for release: this puts the resource into the
   deferred-release queue, and the resource will then be released N frames later;
   the special pool index 0 will be ignored (it means that a nil
   value was provided to _sg_mtl_add_resource())
*/
_SOKOL_PRIVATE void _sg_mtl_release_resource(uint32_t frame_index, int slot_index) {
    if (slot_index == _SG_MTL_INVALID_SLOT_INDEX) {
        return;
    }
    _sg_stats_inc(metal.idpool.num_released);
    SOKOL_ASSERT((slot_index > 0) && (slot_index < _sg.mtl.idpool.num_slots));
    SOKOL_ASSERT([NSNull null] != _sg.mtl.idpool.pool[(NSUInteger)slot_index]);
    int release_index = _sg.mtl.idpool.release_queue_front++;
    if (_sg.mtl.idpool.release_queue_front >= _sg.mtl.idpool.num_slots) {
        // wrap-around
        _sg.mtl.idpool.release_queue_front = 0;
    }
    // release queue full?
    SOKOL_ASSERT(_sg.mtl.idpool.release_queue_front != _sg.mtl.idpool.release_queue_back);
    SOKOL_ASSERT(0 == _sg.mtl.idpool.release_queue[release_index].frame_index);
    const uint32_t safe_to_release_frame_index = frame_index + SG_NUM_INFLIGHT_FRAMES + 1;
    _sg.mtl.idpool.release_queue[release_index].frame_index = safe_to_release_frame_index;
    _sg.mtl.idpool.release_queue[release_index].slot_index = slot_index;
}
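// Example (illustrative walkthrough, with SG_NUM_INFLIGHT_FRAMES == 2): a
// resource released in frame N is stamped with safe_to_release_frame_index
// N + 3, so the garbage-collection pass below only frees it once the frame
// index has advanced far enough that the GPU can no longer be accessing it:
//
//      int slot = _sg_mtl_add_resource(mtl_tex);           // pool takes ownership
//      _sg_mtl_release_resource(_sg.frame_index, slot);    // queued, not freed yet
//      // ...at least SG_NUM_INFLIGHT_FRAMES + 1 frames later...
//      _sg_mtl_garbage_collect(_sg.frame_index);           // actually releases it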
// run garbage-collection pass on all resources in the release-queue
_SOKOL_PRIVATE void _sg_mtl_garbage_collect(uint32_t frame_index) {
    while (_sg.mtl.idpool.release_queue_back != _sg.mtl.idpool.release_queue_front) {
        if (frame_index < _sg.mtl.idpool.release_queue[_sg.mtl.idpool.release_queue_back].frame_index) {
            // don't need to check further, release-items past this are too young
            break;
        }
        _sg_stats_inc(metal.idpool.num_garbage_collected);
        // safe to release this resource
        const int slot_index = _sg.mtl.idpool.release_queue[_sg.mtl.idpool.release_queue_back].slot_index;
        SOKOL_ASSERT((slot_index > 0) && (slot_index < _sg.mtl.idpool.num_slots));
        // note: the NSMutableArray takes ownership of its items, assigning an NSNull object will
        // release the object, no matter if using ARC or not
        SOKOL_ASSERT(_sg.mtl.idpool.pool[(NSUInteger)slot_index] != [NSNull null]);
        _sg.mtl.idpool.pool[(NSUInteger)slot_index] = [NSNull null];
        // put the now free pool index back on the free queue
        _sg_mtl_free_pool_slot(slot_index);
        // reset the release queue slot and advance the back index
        _sg.mtl.idpool.release_queue[_sg.mtl.idpool.release_queue_back].frame_index = 0;
        _sg.mtl.idpool.release_queue[_sg.mtl.idpool.release_queue_back].slot_index = _SG_MTL_INVALID_SLOT_INDEX;
        _sg.mtl.idpool.release_queue_back++;
        if (_sg.mtl.idpool.release_queue_back >= _sg.mtl.idpool.num_slots) {
            // wrap-around
            _sg.mtl.idpool.release_queue_back = 0;
        }
    }
}
_SOKOL_PRIVATE id _sg_mtl_id(int slot_index) {
    return _sg.mtl.idpool.pool[(NSUInteger)slot_index];
}
_SOKOL_PRIVATE void _sg_mtl_clear_state_cache(void) {
    _sg_clear(&_sg.mtl.cache, sizeof(_sg.mtl.cache));
}
// https://developer.apple.com/metal/Metal-Feature-Set-Tables.pdf
_SOKOL_PRIVATE void _sg_mtl_init_caps(void) {
    #if defined(_SG_TARGET_MACOS)
    _sg.backend = SG_BACKEND_METAL_MACOS;
    #elif defined(_SG_TARGET_IOS)
        #if defined(_SG_TARGET_IOS_SIMULATOR)
        _sg.backend = SG_BACKEND_METAL_SIMULATOR;
        #else
        _sg.backend = SG_BACKEND_METAL_IOS;
        #endif
    #endif
    _sg.features.origin_top_left = true;
    _sg.features.mrt_independent_blend_state = true;
    _sg.features.mrt_independent_write_mask = true;
    _sg.features.compute = true;
    _sg.features.msaa_texture_bindings = true;
    _sg.features.draw_base_vertex = true;
    _sg.features.draw_base_instance = true;
    _sg.features.image_clamp_to_border = false;
    #if (MAC_OS_X_VERSION_MAX_ALLOWED >= 120000) || (__IPHONE_OS_VERSION_MAX_ALLOWED >= 140000)
    if (@available(macOS 12.0, iOS 14.0, *)) {
        _sg.features.image_clamp_to_border = [_sg.mtl.device supportsFamily:MTLGPUFamilyApple7]
                                             || [_sg.mtl.device supportsFamily:MTLGPUFamilyMac2];
        #if (MAC_OS_X_VERSION_MAX_ALLOWED >= 130000) || (__IPHONE_OS_VERSION_MAX_ALLOWED >= 160000)
        if (!_sg.features.image_clamp_to_border) {
            if (@available(macOS 13.0, iOS 16.0, *)) {
                _sg.features.image_clamp_to_border = [_sg.mtl.device supportsFamily:MTLGPUFamilyMetal3];
            }
        }
        #endif
    }
    #endif
    #if defined(_SG_TARGET_MACOS)
    _sg.limits.max_image_size_2d = 16 * 1024;
    _sg.limits.max_image_size_cube = 16 * 1024;
    _sg.limits.max_image_size_3d = 2 * 1024;
    _sg.limits.max_image_size_array = 16 * 1024;
    _sg.limits.max_image_array_layers = 2 * 1024;
    _sg.limits.max_texture_bindings_per_stage = _sg_min(128, SG_MAX_VIEW_BINDSLOTS);
    #else
    // FIXME: newer iOS devices support 16k textures
    _sg.limits.max_image_size_2d = 8 * 1024;
    _sg.limits.max_image_size_cube = 8 * 1024;
    _sg.limits.max_image_size_3d = 2 * 1024;
    _sg.limits.max_image_size_array = 8 * 1024;
    _sg.limits.max_image_array_layers = 2 * 1024;
    _sg.limits.max_texture_bindings_per_stage = _sg_min(96, SG_MAX_VIEW_BINDSLOTS); // since iPhone8
    #endif
    _sg.limits.max_storage_image_bindings_per_stage = _sg.limits.max_texture_bindings_per_stage; // shared with texture bindings
    _sg.limits.max_storage_buffer_bindings_per_stage = _sg_min(_SG_MTL_MAX_STAGE_BUFFER_BINDINGS - (SG_MAX_VERTEXBUFFER_BINDSLOTS + SG_MAX_UNIFORMBLOCK_BINDSLOTS), SG_MAX_VIEW_BINDSLOTS);
    _sg.limits.max_color_attachments = _sg_min(8, SG_MAX_COLOR_ATTACHMENTS);
    _sg.limits.max_vertex_attrs = SG_MAX_VERTEX_ATTRIBUTES;
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R8SN]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R8UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R8SI]);
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16SN]);
    #else
    _sg_pixelformat_sfbr(&_sg.formats[SG_PIXELFORMAT_R16]);
    _sg_pixelformat_sfbr(&_sg.formats[SG_PIXELFORMAT_R16SN]);
    #endif
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R16UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_R16SI]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG8SN]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG8UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R32F]);
    #else
    _sg_pixelformat_sbr(&_sg.formats[SG_PIXELFORMAT_R32F]);
    #endif
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16SN]);
    #else
    _sg_pixelformat_sfbr(&_sg.formats[SG_PIXELFORMAT_RG16]);
    _sg_pixelformat_sfbr(&_sg.formats[SG_PIXELFORMAT_RG16SN]);
    #endif
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG16UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG16SI]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_SRGB8A8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_BGRA8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGB10A2]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG11B10F]);
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGB9E5]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    #else
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGB9E5]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    #endif
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG32F]);
    #else
    _sg_pixelformat_sbr(&_sg.formats[SG_PIXELFORMAT_RG32F]);
    #endif
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16SN]);
    #else
    _sg_pixelformat_sfbr(&_sg.formats[SG_PIXELFORMAT_RGBA16]);
    _sg_pixelformat_sfbr(&_sg.formats[SG_PIXELFORMAT_RGBA16SN]);
    #endif
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_srm(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
    #else
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
    #endif
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH]);
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH_STENCIL]);
    #if defined(_SG_TARGET_MACOS)
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC1_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC2_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_SRGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_R]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_RSN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RG]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RGSN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBF]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBUF]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_SRGBA]);
    #else
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8A1]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGBA8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8A8]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_RGBA]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_SRGBA]);
    #endif
    // compute shader access (see: https://github.com/gpuweb/gpuweb/issues/513)
    // for now let's use the same conservative set on all backends even though
    // some backends are less restrictive
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
}
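// NOTE (usage sketch): the feature/limit/pixel-format tables initialized above
// are exposed through the public API via sg_query_features(), sg_query_limits()
// and sg_query_pixelformat(), e.g. to check whether a pixel format supports
// linear filtering on the current device:
//
//      if (sg_query_pixelformat(SG_PIXELFORMAT_RGBA32F).filter) { ... }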
//-- main Metal backend state and functions ------------------------------------
_SOKOL_PRIVATE void _sg_mtl_setup_backend(const sg_desc* desc) {
    // assume already zero-initialized
    SOKOL_ASSERT(desc);
    SOKOL_ASSERT(desc->environment.metal.device);
    SOKOL_ASSERT(desc->uniform_buffer_size > 0);
    _sg_mtl_init_pool(desc);
    _sg_mtl_clear_state_cache();
    _sg.mtl.valid = true;
    _sg.mtl.ub_size = desc->uniform_buffer_size;
    _sg.mtl.sem = dispatch_semaphore_create(SG_NUM_INFLIGHT_FRAMES);
    _sg.mtl.device = (__bridge id<MTLDevice>) desc->environment.metal.device;
    _sg.mtl.cmd_queue = [_sg.mtl.device newCommandQueue];
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        _sg.mtl.uniform_buffers[i] = [_sg.mtl.device
            newBufferWithLength:(NSUInteger)_sg.mtl.ub_size
            options:MTLResourceCPUCacheModeWriteCombined|MTLResourceStorageModeShared
        ];
        #if defined(SOKOL_DEBUG)
        _sg.mtl.uniform_buffers[i].label = [NSString stringWithFormat:@"sg-uniform-buffer.%d", i];
        #endif
    }
    if (desc->metal.force_managed_storage_mode) {
        _sg.mtl.use_shared_storage_mode = false;
    } else if (@available(macOS 10.15, iOS 13.0, *)) {
        // on Intel Macs, always use managed resources even though the
        // device says it supports unified memory (because of texture restrictions)
        const bool is_apple_gpu = [_sg.mtl.device supportsFamily:MTLGPUFamilyApple1];
        _sg.mtl.use_shared_storage_mode = is_apple_gpu;
    } else {
        #if defined(_SG_TARGET_MACOS)
        _sg.mtl.use_shared_storage_mode = false;
        #else
        _sg.mtl.use_shared_storage_mode = true;
        #endif
    }
    _sg_mtl_init_caps();
}
_SOKOL_PRIVATE void _sg_mtl_discard_backend(void) {
    SOKOL_ASSERT(_sg.mtl.valid);
    // wait for the last frame to finish
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        dispatch_semaphore_wait(_sg.mtl.sem, DISPATCH_TIME_FOREVER);
    }
    // semaphore must be "relinquished" before destruction
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        dispatch_semaphore_signal(_sg.mtl.sem);
    }
    _sg_mtl_garbage_collect(_sg.frame_index + SG_NUM_INFLIGHT_FRAMES + 2);
    _sg_mtl_destroy_pool();
    _sg.mtl.valid = false;
    _SG_OBJC_RELEASE(_sg.mtl.sem);
    _SG_OBJC_RELEASE(_sg.mtl.device);
    _SG_OBJC_RELEASE(_sg.mtl.cmd_queue);
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        _SG_OBJC_RELEASE(_sg.mtl.uniform_buffers[i]);
    }
    // NOTE: MTLCommandBuffer, MTLRenderCommandEncoder and MTLComputeCommandEncoder are auto-released
    _sg.mtl.cmd_buffer = nil;
    _sg.mtl.render_cmd_encoder = nil;
    _sg.mtl.compute_cmd_encoder = nil;
}
_SOKOL_PRIVATE void _sg_mtl_reset_state_cache(void) {
    _sg_mtl_clear_state_cache();
}
_SOKOL_PRIVATE sg_resource_state _sg_mtl_create_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(buf && desc);
    SOKOL_ASSERT(buf->cmn.size > 0);
    const bool injected = (0 != desc->mtl_buffers[0]);
    MTLResourceOptions mtl_options = _sg_mtl_buffer_resource_options(&buf->cmn.usage);
    for (int slot = 0; slot < buf->cmn.num_slots; slot++) {
        id<MTLBuffer> mtl_buf;
        if (injected) {
            SOKOL_ASSERT(desc->mtl_buffers[slot]);
            mtl_buf = (__bridge id<MTLBuffer>) desc->mtl_buffers[slot];
        } else {
            if (desc->data.ptr) {
                SOKOL_ASSERT(desc->data.size > 0);
                mtl_buf = [_sg.mtl.device newBufferWithBytes:desc->data.ptr length:(NSUInteger)buf->cmn.size options:mtl_options];
            } else {
                mtl_buf = [_sg.mtl.device newBufferWithLength:(NSUInteger)buf->cmn.size options:mtl_options];
            }
            if (nil == mtl_buf) {
                _SG_ERROR(METAL_CREATE_BUFFER_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
        }
        #if defined(SOKOL_DEBUG)
        if (desc->label) {
            mtl_buf.label = [NSString stringWithFormat:@"%s.%d", desc->label, slot];
        }
        #endif
        buf->mtl.buf[slot] = _sg_mtl_add_resource(mtl_buf);
        _SG_OBJC_RELEASE(mtl_buf);
    }
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_mtl_discard_buffer(_sg_buffer_t* buf) {
    SOKOL_ASSERT(buf);
    for (int slot = 0; slot < buf->cmn.num_slots; slot++) {
        // it's valid to call release resource with '0'
        _sg_mtl_release_resource(_sg.frame_index, buf->mtl.buf[slot]);
    }
}
_SOKOL_PRIVATE void _sg_mtl_copy_image_data(const _sg_image_t* img, __unsafe_unretained id<MTLTexture> mtl_tex, const sg_image_data* data) {
    const int num_slices = (img->cmn.type == SG_IMAGETYPE_3D) ? 1 : img->cmn.num_slices;
    for (int mip_index = 0; mip_index < img->cmn.num_mipmaps; mip_index++) {
        SOKOL_ASSERT(data->mip_levels[mip_index].ptr);
        SOKOL_ASSERT(data->mip_levels[mip_index].size > 0);
        const uint8_t* data_ptr = (const uint8_t*)data->mip_levels[mip_index].ptr;
        const int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
        const int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
        int bytes_per_row = _sg_row_pitch(img->cmn.pixel_format, mip_width, 1);
        int bytes_per_slice = _sg_surface_pitch(img->cmn.pixel_format, mip_width, mip_height, 1);
        /* bytesPerImage special case: https://developer.apple.com/documentation/metal/mtltexture/1515679-replaceregion
           "Supply a nonzero value only when you copy data to a MTLTextureType3D type texture"
        */
        MTLRegion region;
        int bytes_per_image;
        if (img->cmn.type == SG_IMAGETYPE_3D) {
            const int mip_depth = _sg_miplevel_dim(img->cmn.num_slices, mip_index);
            region = MTLRegionMake3D(0, 0, 0, (NSUInteger)mip_width, (NSUInteger)mip_height, (NSUInteger)mip_depth);
            bytes_per_image = bytes_per_slice;
            // FIXME: apparently the minimal bytes_per_image size for 3D texture is 4 KByte... somehow need to handle this
        } else {
            region = MTLRegionMake2D(0, 0, (NSUInteger)mip_width, (NSUInteger)mip_height);
            bytes_per_image = 0;
        }
        for (int slice_index = 0; slice_index < num_slices; slice_index++) {
            const int slice_offset = slice_index * bytes_per_slice;
            SOKOL_ASSERT((slice_offset + bytes_per_slice) <= (int)data->mip_levels[mip_index].size);
            [mtl_tex replaceRegion:region
                mipmapLevel:(NSUInteger)mip_index
                slice:(NSUInteger)slice_index
                withBytes:data_ptr + slice_offset
                bytesPerRow:(NSUInteger)bytes_per_row
                bytesPerImage:(NSUInteger)bytes_per_image];
        }
    }
}
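// Worked example (illustrative numbers only): for a 256x256 SG_PIXELFORMAT_RGBA8
// 2D image at mip level 0, bytes_per_row is 256 * 4 = 1024 and bytes_per_slice
// is 256 * 1024 = 262144, while bytes_per_image is passed as 0 as required by
// the replaceRegion documentation quoted above (non-zero only for 3D textures).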
_SOKOL_PRIVATE bool _sg_mtl_init_texdesc(MTLTextureDescriptor* mtl_desc, _sg_image_t* img) {
    mtl_desc.textureType = _sg_mtl_texture_type(img->cmn.type, img->cmn.sample_count > 1);
    mtl_desc.pixelFormat = _sg_mtl_pixel_format(img->cmn.pixel_format);
    if (MTLPixelFormatInvalid == mtl_desc.pixelFormat) {
        _SG_ERROR(METAL_TEXTURE_FORMAT_NOT_SUPPORTED);
        return false;
    }
    mtl_desc.width = (NSUInteger)img->cmn.width;
    mtl_desc.height = (NSUInteger)img->cmn.height;
    if (SG_IMAGETYPE_3D == img->cmn.type) {
        mtl_desc.depth = (NSUInteger)img->cmn.num_slices;
    } else {
        mtl_desc.depth = 1;
    }
    mtl_desc.mipmapLevelCount = (NSUInteger)img->cmn.num_mipmaps;
    if (SG_IMAGETYPE_ARRAY == img->cmn.type) {
        mtl_desc.arrayLength = (NSUInteger)img->cmn.num_slices;
    } else {
        mtl_desc.arrayLength = 1;
    }
    mtl_desc.sampleCount = (NSUInteger)img->cmn.sample_count;
    const sg_image_usage* usg = &img->cmn.usage;
    const bool any_attachment = usg->color_attachment || usg->resolve_attachment || usg->depth_stencil_attachment;
    MTLTextureUsage mtl_tex_usage = MTLTextureUsageShaderRead;
    if (any_attachment) {
        mtl_tex_usage |= MTLTextureUsageRenderTarget;
    }
    if (img->cmn.usage.storage_image) {
        mtl_tex_usage |= MTLTextureUsageShaderWrite;
    }
    mtl_desc.usage = mtl_tex_usage;
    MTLResourceOptions mtl_res_options = 0;
    if (any_attachment || img->cmn.usage.storage_image) {
        mtl_res_options |= MTLResourceStorageModePrivate;
    } else {
        mtl_res_options |= _sg_mtl_resource_options_storage_mode_managed_or_shared();
        if (!img->cmn.usage.immutable) {
            mtl_res_options |= MTLResourceCPUCacheModeWriteCombined;
        }
    }
    mtl_desc.resourceOptions = mtl_res_options;
    return true;
}
_SOKOL_PRIVATE sg_resource_state _sg_mtl_create_image(_sg_image_t* img, const sg_image_desc* desc) {
    SOKOL_ASSERT(img && desc);
    const bool injected = (0 != desc->mtl_textures[0]);
    // first initialize all Metal resource pool slots to 'empty'
    for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        img->mtl.tex[i] = _sg_mtl_add_resource(nil);
    }
    // initialize a Metal texture descriptor
    MTLTextureDescriptor* mtl_desc = [[MTLTextureDescriptor alloc] init];
    if (!_sg_mtl_init_texdesc(mtl_desc, img)) {
        _SG_OBJC_RELEASE(mtl_desc);
        return SG_RESOURCESTATE_FAILED;
    }
    for (int slot = 0; slot < img->cmn.num_slots; slot++) {
        id<MTLTexture> mtl_tex;
        if (injected) {
            SOKOL_ASSERT(desc->mtl_textures[slot]);
            mtl_tex = (__bridge id<MTLTexture>) desc->mtl_textures[slot];
        } else {
            mtl_tex = [_sg.mtl.device newTextureWithDescriptor:mtl_desc];
            if (nil == mtl_tex) {
                _SG_OBJC_RELEASE(mtl_desc);
                _SG_ERROR(METAL_CREATE_TEXTURE_FAILED);
                return SG_RESOURCESTATE_FAILED;
            }
            if (desc->data.mip_levels[0].ptr) {
                _sg_mtl_copy_image_data(img, mtl_tex, &desc->data);
            }
        }
        #if defined(SOKOL_DEBUG)
        if (desc->label) {
            mtl_tex.label = [NSString stringWithFormat:@"%s.%d", desc->label, slot];
        }
        #endif
        img->mtl.tex[slot] = _sg_mtl_add_resource(mtl_tex);
        _SG_OBJC_RELEASE(mtl_tex);
    }
    _SG_OBJC_RELEASE(mtl_desc);
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_mtl_discard_image(_sg_image_t* img) {
    SOKOL_ASSERT(img);
    // it's valid to call release resource with a 'null resource'
    for (int slot = 0; slot < img->cmn.num_slots; slot++) {
        _sg_mtl_release_resource(_sg.frame_index, img->mtl.tex[slot]);
    }
}
_SOKOL_PRIVATE sg_resource_state _sg_mtl_create_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
    SOKOL_ASSERT(smp && desc);
    id<MTLSamplerState> mtl_smp;
    const bool injected = (0 != desc->mtl_sampler);
    if (injected) {
        SOKOL_ASSERT(desc->mtl_sampler);
        mtl_smp = (__bridge id<MTLSamplerState>) desc->mtl_sampler;
    } else {
        MTLSamplerDescriptor* mtl_desc = [[MTLSamplerDescriptor alloc] init];
        mtl_desc.sAddressMode = _sg_mtl_address_mode(desc->wrap_u);
        mtl_desc.tAddressMode = _sg_mtl_address_mode(desc->wrap_v);
        mtl_desc.rAddressMode = _sg_mtl_address_mode(desc->wrap_w);
        if (_sg.features.image_clamp_to_border) {
            if (@available(macOS 12.0, iOS 14.0, *)) {
                mtl_desc.borderColor = _sg_mtl_border_color(desc->border_color);
            }
        }
        mtl_desc.minFilter = _sg_mtl_minmag_filter(desc->min_filter);
        mtl_desc.magFilter = _sg_mtl_minmag_filter(desc->mag_filter);
        mtl_desc.mipFilter = _sg_mtl_mipmap_filter(desc->mipmap_filter);
        mtl_desc.lodMinClamp = desc->min_lod;
        mtl_desc.lodMaxClamp = desc->max_lod;
        // FIXME: lodAverage?
        mtl_desc.maxAnisotropy = desc->max_anisotropy;
        mtl_desc.normalizedCoordinates = YES;
        mtl_desc.compareFunction = _sg_mtl_compare_func(desc->compare);
        #if defined(SOKOL_DEBUG)
        if (desc->label) {
            mtl_desc.label = [NSString stringWithUTF8String:desc->label];
        }
        #endif
        mtl_smp = [_sg.mtl.device newSamplerStateWithDescriptor:mtl_desc];
        _SG_OBJC_RELEASE(mtl_desc);
        if (nil == mtl_smp) {
            _SG_ERROR(METAL_CREATE_SAMPLER_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
    }
    smp->mtl.sampler_state = _sg_mtl_add_resource(mtl_smp);
    _SG_OBJC_RELEASE(mtl_smp);
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_mtl_discard_sampler(_sg_sampler_t* smp) {
    SOKOL_ASSERT(smp);
    // it's valid to call release resource with a 'null resource'
    _sg_mtl_release_resource(_sg.frame_index, smp->mtl.sampler_state);
}
_SOKOL_PRIVATE id<MTLLibrary> _sg_mtl_compile_library(const char* src) {
    NSError* err = NULL;
    id<MTLLibrary> lib = [_sg.mtl.device
        newLibraryWithSource:[NSString stringWithUTF8String:src]
        options:nil
        error:&err
    ];
    if (err) {
        _SG_ERROR(METAL_SHADER_COMPILATION_FAILED);
        _SG_LOGMSG(METAL_SHADER_COMPILATION_OUTPUT, [err.localizedDescription UTF8String]);
    }
    return lib;
}
_SOKOL_PRIVATE id<MTLLibrary> _sg_mtl_library_from_bytecode(const void* ptr, size_t num_bytes) {
    NSError* err = NULL;
    dispatch_data_t lib_data = dispatch_data_create(ptr, num_bytes, NULL, DISPATCH_DATA_DESTRUCTOR_DEFAULT);
    id<MTLLibrary> lib = [_sg.mtl.device newLibraryWithData:lib_data error:&err];
    if (err) {
        _SG_ERROR(METAL_SHADER_CREATION_FAILED);
        _SG_LOGMSG(METAL_SHADER_COMPILATION_OUTPUT, [err.localizedDescription UTF8String]);
    }
    _SG_OBJC_RELEASE(lib_data);
    return lib;
}
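// Usage sketch (illustrative only; 'vs_msl_source' and 'fs_bytecode' are
// hypothetical application-side data): each shader function can be provided
// either as MSL source code (runtime-compiled via _sg_mtl_compile_library())
// or as precompiled metallib bytecode (loaded via _sg_mtl_library_from_bytecode()):
//
//      sg_shader shd = sg_make_shader(&(sg_shader_desc){
//          .vertex_func.source = vs_msl_source,                // compiled at runtime
//          .fragment_func.bytecode = SG_RANGE(fs_bytecode),    // precompiled metallib
//      });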
_SOKOL_PRIVATE bool _sg_mtl_create_shader_func(const sg_shader_function* func, const char* label, const char* label_ext, _sg_mtl_shader_func_t* res) {
    SOKOL_ASSERT(res->mtl_lib == _SG_MTL_INVALID_SLOT_INDEX);
    SOKOL_ASSERT(res->mtl_func == _SG_MTL_INVALID_SLOT_INDEX);
    id<MTLLibrary> mtl_lib = nil;
    if (func->bytecode.ptr) {
        SOKOL_ASSERT(func->bytecode.size > 0);
        mtl_lib = _sg_mtl_library_from_bytecode(func->bytecode.ptr, func->bytecode.size);
    } else if (func->source) {
        mtl_lib = _sg_mtl_compile_library(func->source);
    }
    if (mtl_lib == nil) {
        return false;
    }
    #if defined(SOKOL_DEBUG)
    if (label) {
        SOKOL_ASSERT(label_ext);
        mtl_lib.label = [NSString stringWithFormat:@"%s.%s", label, label_ext];
    }
    #else
    _SOKOL_UNUSED(label);
    _SOKOL_UNUSED(label_ext);
    #endif
    SOKOL_ASSERT(func->entry);
    id<MTLFunction> mtl_func = [mtl_lib newFunctionWithName:[NSString stringWithUTF8String:func->entry]];
    if (mtl_func == nil) {
        _SG_ERROR(METAL_SHADER_ENTRY_NOT_FOUND);
        _SG_OBJC_RELEASE(mtl_lib);
        return false;
    }
    res->mtl_lib = _sg_mtl_add_resource(mtl_lib);
    res->mtl_func = _sg_mtl_add_resource(mtl_func);
    _SG_OBJC_RELEASE(mtl_lib);
    _SG_OBJC_RELEASE(mtl_func);
    return true;
}
_SOKOL_PRIVATE void _sg_mtl_discard_shader_func(const _sg_mtl_shader_func_t* func) {
    // it is valid to call _sg_mtl_release_resource with a 'null resource'
    _sg_mtl_release_resource(_sg.frame_index, func->mtl_func);
    _sg_mtl_release_resource(_sg.frame_index, func->mtl_lib);
}
// NOTE: this is an out-of-range check for MSL bindslots that's also active in release mode
_SOKOL_PRIVATE bool _sg_mtl_ensure_msl_bindslot_ranges(const sg_shader_desc* desc) {
    SOKOL_ASSERT(desc);
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        const sg_shader_uniform_block* ub = &desc->uniform_blocks[i];
        if (ub->stage != SG_SHADERSTAGE_NONE) {
            if (ub->msl_buffer_n >= _SG_MTL_MAX_STAGE_UB_BINDINGS) {
                _SG_ERROR(METAL_UNIFORMBLOCK_MSL_BUFFER_SLOT_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const sg_shader_view* view = &desc->views[i];
        if (view->texture.stage != SG_SHADERSTAGE_NONE) {
            if (view->texture.msl_texture_n >= _SG_MTL_MAX_STAGE_TEXTURE_BINDINGS) {
                _SG_ERROR(METAL_IMAGE_MSL_TEXTURE_SLOT_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
            if ((view->storage_buffer.msl_buffer_n < _SG_MTL_MAX_STAGE_UB_BINDINGS) ||
                (view->storage_buffer.msl_buffer_n >= _SG_MTL_MAX_STAGE_UB_SBUF_BINDINGS))
            {
                _SG_ERROR(METAL_STORAGEBUFFER_MSL_BUFFER_SLOT_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_image.msl_texture_n >= _SG_MTL_MAX_STAGE_TEXTURE_BINDINGS) {
                _SG_ERROR(METAL_STORAGEIMAGE_MSL_TEXTURE_SLOT_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        const sg_shader_sampler* smp = &desc->samplers[i];
        if (smp->stage != SG_SHADERSTAGE_NONE) {
            if (smp->msl_sampler_n >= _SG_MTL_MAX_STAGE_SAMPLER_BINDINGS) {
                _SG_ERROR(METAL_SAMPLER_MSL_SAMPLER_SLOT_OUT_OF_RANGE);
                return false;
            }
        }
    }
    return true;
}
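// Example (illustrative): an sg_shader_desc which declares a storage buffer
// view with msl_buffer_n = 0 would be rejected here, since Metal buffer slots
// below _SG_MTL_MAX_STAGE_UB_BINDINGS are reserved for uniform blocks (see the
// bind slot layout note near _sg_mtl_vertexbuffer_bindslot() above).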
_SOKOL_PRIVATE sg_resource_state _sg_mtl_create_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
    SOKOL_ASSERT(shd && desc);
    // do a MSL bindslot range check also in release mode, and if that fails,
    // also fail shader creation
    if (!_sg_mtl_ensure_msl_bindslot_ranges(desc)) {
        return SG_RESOURCESTATE_FAILED;
    }
    shd->mtl.threads_per_threadgroup = MTLSizeMake(
        (NSUInteger)desc->mtl_threads_per_threadgroup.x,
        (NSUInteger)desc->mtl_threads_per_threadgroup.y,
        (NSUInteger)desc->mtl_threads_per_threadgroup.z);
    // copy resource bindslot mappings
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        shd->mtl.ub_buffer_n[i] = desc->uniform_blocks[i].msl_buffer_n;
    }
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const sg_shader_view* view = &desc->views[i];
        SOKOL_ASSERT(0 == shd->mtl.view_buffer_texture_n[i]);
        if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
            shd->mtl.view_buffer_texture_n[i] = view->storage_buffer.msl_buffer_n;
        } else if (view->texture.stage != SG_SHADERSTAGE_NONE) {
            shd->mtl.view_buffer_texture_n[i] = view->texture.msl_texture_n;
        } else if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
            shd->mtl.view_buffer_texture_n[i] = view->storage_image.msl_texture_n;
        }
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        shd->mtl.smp_sampler_n[i] = desc->samplers[i].msl_sampler_n;
    }
    // create metal library and function objects
    bool shd_valid = true;
    if (desc->vertex_func.source || desc->vertex_func.bytecode.ptr) {
        shd_valid &= _sg_mtl_create_shader_func(&desc->vertex_func, desc->label, "vs", &shd->mtl.vertex_func);
    }
    if (desc->fragment_func.source || desc->fragment_func.bytecode.ptr) {
        shd_valid &= _sg_mtl_create_shader_func(&desc->fragment_func, desc->label, "fs", &shd->mtl.fragment_func);
    }
    if (desc->compute_func.source || desc->compute_func.bytecode.ptr) {
        shd_valid &= _sg_mtl_create_shader_func(&desc->compute_func, desc->label, "cs", &shd->mtl.compute_func);
    }
    if (!shd_valid) {
        _sg_mtl_discard_shader_func(&shd->mtl.vertex_func);
        _sg_mtl_discard_shader_func(&shd->mtl.fragment_func);
        _sg_mtl_discard_shader_func(&shd->mtl.compute_func);
    }
    return shd_valid ? SG_RESOURCESTATE_VALID : SG_RESOURCESTATE_FAILED;
}
_SOKOL_PRIVATE void _sg_mtl_discard_shader(_sg_shader_t* shd) {
    SOKOL_ASSERT(shd);
    _sg_mtl_discard_shader_func(&shd->mtl.vertex_func);
    _sg_mtl_discard_shader_func(&shd->mtl.fragment_func);
    _sg_mtl_discard_shader_func(&shd->mtl.compute_func);
}
_SOKOL_PRIVATE sg_resource_state _sg_mtl_create_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(pip && desc);
    _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    if (pip->cmn.is_compute) {
        NSError* err = NULL;
        MTLComputePipelineDescriptor* cp_desc = [[MTLComputePipelineDescriptor alloc] init];
        cp_desc.computeFunction = _sg_mtl_id(shd->mtl.compute_func.mtl_func);
        cp_desc.threadGroupSizeIsMultipleOfThreadExecutionWidth = true;
        for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
            const _sg_shader_view_t* view = &shd->cmn.views[i];
            if (view->view_type != SG_VIEWTYPE_STORAGEBUFFER) {
                continue;
            }
            if (!view->sbuf_readonly) {
                continue;
            }
            SOKOL_ASSERT(view->stage == SG_SHADERSTAGE_COMPUTE);
            const NSUInteger mtl_slot = shd->mtl.view_buffer_texture_n[i];
            SOKOL_ASSERT(mtl_slot < _SG_MTL_MAX_STAGE_BUFFER_BINDINGS);
            cp_desc.buffers[mtl_slot].mutability = MTLMutabilityImmutable;
        }
        #if defined(SOKOL_DEBUG)
        if (desc->label) {
            cp_desc.label = [NSString stringWithFormat:@"%s", desc->label];
        }
        #endif
        id<MTLComputePipelineState> mtl_cps = [_sg.mtl.device
            newComputePipelineStateWithDescriptor:cp_desc
            options:MTLPipelineOptionNone
            reflection:nil
            error:&err];
        _SG_OBJC_RELEASE(cp_desc);
        if (nil == mtl_cps) {
            SOKOL_ASSERT(err);
            _SG_ERROR(METAL_CREATE_CPS_FAILED);
            _SG_LOGMSG(METAL_CREATE_CPS_OUTPUT, [err.localizedDescription UTF8String]);
            return SG_RESOURCESTATE_FAILED;
        }
        pip->mtl.cps = _sg_mtl_add_resource(mtl_cps);
        _SG_OBJC_RELEASE(mtl_cps);
        pip->mtl.threads_per_threadgroup = shd->mtl.threads_per_threadgroup;
    } else {
        sg_primitive_type prim_type = desc->primitive_type;
        pip->mtl.prim_type = _sg_mtl_primitive_type(prim_type);
        pip->mtl.index_size = _sg_mtl_index_size(pip->cmn.index_type);
        if (SG_INDEXTYPE_NONE != pip->cmn.index_type) {
            pip->mtl.index_type = _sg_mtl_index_type(pip->cmn.index_type);
        }
        pip->mtl.cull_mode = _sg_mtl_cull_mode(desc->cull_mode);
        pip->mtl.winding = _sg_mtl_winding(desc->face_winding);
        pip->mtl.stencil_ref = desc->stencil.ref;
        // create vertex-descriptor
        MTLVertexDescriptor* vtx_desc = [MTLVertexDescriptor vertexDescriptor];
        for (NSUInteger attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
            const sg_vertex_attr_state* a_state = &desc->layout.attrs[attr_index];
            if (a_state->format == SG_VERTEXFORMAT_INVALID) {
                break;
            }
            SOKOL_ASSERT(a_state->buffer_index < SG_MAX_VERTEXBUFFER_BINDSLOTS);
            SOKOL_ASSERT(pip->cmn.vertex_buffer_layout_active[a_state->buffer_index]);
            vtx_desc.attributes[attr_index].format = _sg_mtl_vertex_format(a_state->format);
            vtx_desc.attributes[attr_index].offset = (NSUInteger)a_state->offset;
            vtx_desc.attributes[attr_index].bufferIndex = _sg_mtl_vertexbuffer_bindslot((size_t)a_state->buffer_index);
        }
        for (NSUInteger layout_index = 0; layout_index < SG_MAX_VERTEXBUFFER_BINDSLOTS; layout_index++) {
            if (pip->cmn.vertex_buffer_layout_active[layout_index]) {
                const sg_vertex_buffer_layout_state* l_state = &desc->layout.buffers[layout_index];
                const NSUInteger mtl_vb_slot = _sg_mtl_vertexbuffer_bindslot(layout_index);
                SOKOL_ASSERT(l_state->stride > 0);
                vtx_desc.layouts[mtl_vb_slot].stride = (NSUInteger)l_state->stride;
                vtx_desc.layouts[mtl_vb_slot].stepFunction = _sg_mtl_step_function(l_state->step_func);
                vtx_desc.layouts[mtl_vb_slot].stepRate = (NSUInteger)l_state->step_rate;
            }
        }
        // render-pipeline descriptor
        MTLRenderPipelineDescriptor* rp_desc = [[MTLRenderPipelineDescriptor alloc] init];
        rp_desc.vertexDescriptor = vtx_desc;
        SOKOL_ASSERT(shd->mtl.vertex_func.mtl_func != _SG_MTL_INVALID_SLOT_INDEX);
        rp_desc.vertexFunction = _sg_mtl_id(shd->mtl.vertex_func.mtl_func);
        SOKOL_ASSERT(shd->mtl.fragment_func.mtl_func != _SG_MTL_INVALID_SLOT_INDEX);
        rp_desc.fragmentFunction = _sg_mtl_id(shd->mtl.fragment_func.mtl_func);
        rp_desc.rasterSampleCount = (NSUInteger)desc->sample_count;
        rp_desc.alphaToCoverageEnabled = desc->alpha_to_coverage_enabled;
        rp_desc.alphaToOneEnabled = NO;
        rp_desc.rasterizationEnabled = YES;
        rp_desc.depthAttachmentPixelFormat = _sg_mtl_pixel_format(desc->depth.pixel_format);
        if (desc->depth.pixel_format == SG_PIXELFORMAT_DEPTH_STENCIL) {
            rp_desc.stencilAttachmentPixelFormat = _sg_mtl_pixel_format(desc->depth.pixel_format);
        }
        for (NSUInteger i = 0; i < (NSUInteger)desc->color_count; i++) {
            SOKOL_ASSERT(i < SG_MAX_COLOR_ATTACHMENTS);
            const sg_color_target_state* cs = &desc->colors[i];
            rp_desc.colorAttachments[i].pixelFormat = _sg_mtl_pixel_format(cs->pixel_format);
            rp_desc.colorAttachments[i].writeMask = _sg_mtl_color_write_mask(cs->write_mask);
            rp_desc.colorAttachments[i].blendingEnabled = cs->blend.enabled;
            rp_desc.colorAttachments[i].alphaBlendOperation = _sg_mtl_blend_op(cs->blend.op_alpha);
            rp_desc.colorAttachments[i].rgbBlendOperation = _sg_mtl_blend_op(cs->blend.op_rgb);
            rp_desc.colorAttachments[i].destinationAlphaBlendFactor = _sg_mtl_blend_factor(cs->blend.dst_factor_alpha);
            rp_desc.colorAttachments[i].destinationRGBBlendFactor = _sg_mtl_blend_factor(cs->blend.dst_factor_rgb);
            rp_desc.colorAttachments[i].sourceAlphaBlendFactor = _sg_mtl_blend_factor(cs->blend.src_factor_alpha);
            rp_desc.colorAttachments[i].sourceRGBBlendFactor = _sg_mtl_blend_factor(cs->blend.src_factor_rgb);
        }
        // Set buffer mutability for all buffers (vertex buffers and storage buffers).
        // For vertex buffers it is guaranteed that neither the GPU nor the CPU will
        // update their content while they are in flight (since dynamic buffers are
        // double-buffered, and vertex buffers are not updated by the GPU).
        // For storage buffers the same double-buffering applies, and storage buffers
        // bound to the vertex or fragment stage must be declared as readonly in the shader.
  14189. for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
  14190. if (pip->cmn.vertex_buffer_layout_active[i]) {
  14191. const NSUInteger mtl_slot = _sg_mtl_vertexbuffer_bindslot(i);
  14192. rp_desc.vertexBuffers[mtl_slot].mutability = MTLMutabilityImmutable;
  14193. }
  14194. }
  14195. for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
  14196. const _sg_shader_view_t* view = &shd->cmn.views[i];
  14197. if (view->view_type != SG_VIEWTYPE_STORAGEBUFFER) {
  14198. continue;
  14199. }
  14200. const sg_shader_stage stage = view->stage;
  14201. SOKOL_ASSERT(view->stage != SG_SHADERSTAGE_COMPUTE);
  14202. SOKOL_ASSERT(view->sbuf_readonly);
  14203. const NSUInteger mtl_slot = shd->mtl.view_buffer_texture_n[i];
  14204. SOKOL_ASSERT(mtl_slot < _SG_MTL_MAX_STAGE_BUFFER_BINDINGS);
  14205. if (stage == SG_SHADERSTAGE_VERTEX) {
  14206. rp_desc.vertexBuffers[mtl_slot].mutability = MTLMutabilityImmutable;
  14207. } else if (stage == SG_SHADERSTAGE_FRAGMENT) {
  14208. rp_desc.fragmentBuffers[mtl_slot].mutability = MTLMutabilityImmutable;
  14209. }
  14210. }
  14211. #if defined(SOKOL_DEBUG)
  14212. if (desc->label) {
  14213. rp_desc.label = [NSString stringWithFormat:@"%s", desc->label];
  14214. }
  14215. #endif
  14216. NSError* err = NULL;
  14217. id<MTLRenderPipelineState> mtl_rps = [_sg.mtl.device newRenderPipelineStateWithDescriptor:rp_desc error:&err];
  14218. _SG_OBJC_RELEASE(rp_desc);
  14219. if (nil == mtl_rps) {
  14220. SOKOL_ASSERT(err);
  14221. _SG_ERROR(METAL_CREATE_RPS_FAILED);
  14222. _SG_LOGMSG(METAL_CREATE_RPS_OUTPUT, [err.localizedDescription UTF8String]);
  14223. return SG_RESOURCESTATE_FAILED;
  14224. }
        pip->mtl.rps = _sg_mtl_add_resource(mtl_rps);
        _SG_OBJC_RELEASE(mtl_rps);
        // depth-stencil-state
        MTLDepthStencilDescriptor* ds_desc = [[MTLDepthStencilDescriptor alloc] init];
        ds_desc.depthCompareFunction = _sg_mtl_compare_func(desc->depth.compare);
        ds_desc.depthWriteEnabled = desc->depth.write_enabled;
        if (desc->stencil.enabled) {
            const sg_stencil_face_state* sb = &desc->stencil.back;
            ds_desc.backFaceStencil = [[MTLStencilDescriptor alloc] init];
            ds_desc.backFaceStencil.stencilFailureOperation = _sg_mtl_stencil_op(sb->fail_op);
            ds_desc.backFaceStencil.depthFailureOperation = _sg_mtl_stencil_op(sb->depth_fail_op);
            ds_desc.backFaceStencil.depthStencilPassOperation = _sg_mtl_stencil_op(sb->pass_op);
            ds_desc.backFaceStencil.stencilCompareFunction = _sg_mtl_compare_func(sb->compare);
            ds_desc.backFaceStencil.readMask = desc->stencil.read_mask;
            ds_desc.backFaceStencil.writeMask = desc->stencil.write_mask;
            const sg_stencil_face_state* sf = &desc->stencil.front;
            ds_desc.frontFaceStencil = [[MTLStencilDescriptor alloc] init];
            ds_desc.frontFaceStencil.stencilFailureOperation = _sg_mtl_stencil_op(sf->fail_op);
            ds_desc.frontFaceStencil.depthFailureOperation = _sg_mtl_stencil_op(sf->depth_fail_op);
            ds_desc.frontFaceStencil.depthStencilPassOperation = _sg_mtl_stencil_op(sf->pass_op);
            ds_desc.frontFaceStencil.stencilCompareFunction = _sg_mtl_compare_func(sf->compare);
            ds_desc.frontFaceStencil.readMask = desc->stencil.read_mask;
            ds_desc.frontFaceStencil.writeMask = desc->stencil.write_mask;
        }
        #if defined(SOKOL_DEBUG)
        if (desc->label) {
            ds_desc.label = [NSString stringWithFormat:@"%s.dss", desc->label];
        }
        #endif
        id<MTLDepthStencilState> mtl_dss = [_sg.mtl.device newDepthStencilStateWithDescriptor:ds_desc];
        _SG_OBJC_RELEASE(ds_desc);
        if (nil == mtl_dss) {
            _SG_ERROR(METAL_CREATE_DSS_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        pip->mtl.dss = _sg_mtl_add_resource(mtl_dss);
        _SG_OBJC_RELEASE(mtl_dss);
    }
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_mtl_discard_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    // it's valid to call _sg_mtl_release_resource() with a 'null resource'
    _sg_mtl_release_resource(_sg.frame_index, pip->mtl.cps);
    _sg_mtl_release_resource(_sg.frame_index, pip->mtl.rps);
    _sg_mtl_release_resource(_sg.frame_index, pip->mtl.dss);
}
_SOKOL_PRIVATE sg_resource_state _sg_mtl_create_view(_sg_view_t* view, const sg_view_desc* desc) {
    SOKOL_ASSERT(view && desc);
    _SOKOL_UNUSED(desc);
    if ((SG_VIEWTYPE_TEXTURE == view->cmn.type) || (SG_VIEWTYPE_STORAGEIMAGE == view->cmn.type)) {
        const _sg_image_view_common_t* cmn = &view->cmn.img;
        const _sg_image_t* img = _sg_image_ref_ptr(&cmn->ref);
        SOKOL_ASSERT(cmn->mip_level_count >= 1);
        SOKOL_ASSERT(cmn->slice_count >= 1);
        for (int slot = 0; slot < img->cmn.num_slots; slot++) {
            SOKOL_ASSERT(img->mtl.tex[slot] != _SG_MTL_INVALID_SLOT_INDEX);
            id<MTLTexture> mtl_tex_view = [_sg_mtl_id(img->mtl.tex[slot])
                newTextureViewWithPixelFormat: _sg_mtl_pixel_format(img->cmn.pixel_format)
                textureType: _sg_mtl_texture_type(img->cmn.type, img->cmn.sample_count > 1)
                levels: NSMakeRange((NSUInteger)cmn->mip_level, (NSUInteger)cmn->mip_level_count)
                slices: NSMakeRange((NSUInteger)cmn->slice, (NSUInteger)cmn->slice_count)];
            #if defined(SOKOL_DEBUG)
            if (desc->label) {
                mtl_tex_view.label = [NSString stringWithFormat:@"%s.%d", desc->label, slot];
            }
            #endif
            view->mtl.tex_view[slot] = _sg_mtl_add_resource(mtl_tex_view);
            _SG_OBJC_RELEASE(mtl_tex_view);
        }
    }
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_mtl_discard_view(_sg_view_t* view) {
    SOKOL_ASSERT(view);
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        // it's valid to call _sg_mtl_release_resource() with a null handle
        _sg_mtl_release_resource(_sg.frame_index, view->mtl.tex_view[i]);
    }
}
_SOKOL_PRIVATE void _sg_mtl_bind_uniform_buffers(void) {
    // In the Metal backend, uniform buffer bindings happen once in sg_begin_pass() and
    // remain valid for the entire pass. Only binding offsets will be updated
    // in sg_apply_uniforms().
    if (_sg.cur_pass.is_compute) {
        SOKOL_ASSERT(nil != _sg.mtl.compute_cmd_encoder);
        for (size_t slot = 0; slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS; slot++) {
            [_sg.mtl.compute_cmd_encoder
                setBuffer:_sg.mtl.uniform_buffers[_sg.mtl.cur_frame_rotate_index]
                offset:0
                atIndex:slot];
        }
    } else {
        SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
        for (size_t slot = 0; slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS; slot++) {
            [_sg.mtl.render_cmd_encoder
                setVertexBuffer:_sg.mtl.uniform_buffers[_sg.mtl.cur_frame_rotate_index]
                offset:0
                atIndex:slot];
            [_sg.mtl.render_cmd_encoder
                setFragmentBuffer:_sg.mtl.uniform_buffers[_sg.mtl.cur_frame_rotate_index]
                offset:0
                atIndex:slot];
        }
    }
}
_SOKOL_PRIVATE void _sg_mtl_begin_compute_pass(const sg_pass* pass) {
    SOKOL_ASSERT(pass); (void)pass;
    SOKOL_ASSERT(nil != _sg.mtl.cmd_buffer);
    SOKOL_ASSERT(nil == _sg.mtl.compute_cmd_encoder);
    SOKOL_ASSERT(nil == _sg.mtl.render_cmd_encoder);
    _sg.mtl.compute_cmd_encoder = [_sg.mtl.cmd_buffer computeCommandEncoder];
    if (nil == _sg.mtl.compute_cmd_encoder) {
        _sg.cur_pass.valid = false;
        return;
    }
    #if defined(SOKOL_DEBUG)
    if (pass->label) {
        _sg.mtl.compute_cmd_encoder.label = [NSString stringWithUTF8String:pass->label];
    }
    #endif
}
_SOKOL_PRIVATE void _sg_mtl_begin_render_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(pass && atts);
    SOKOL_ASSERT(nil != _sg.mtl.cmd_buffer);
    SOKOL_ASSERT(nil == _sg.mtl.render_cmd_encoder);
    SOKOL_ASSERT(nil == _sg.mtl.compute_cmd_encoder);
    const sg_swapchain* swapchain = &pass->swapchain;
    const sg_pass_action* action = &pass->action;
    MTLRenderPassDescriptor* pass_desc = [MTLRenderPassDescriptor renderPassDescriptor];
    SOKOL_ASSERT(pass_desc);
    if (!atts->empty) {
        // setup pass descriptor for offscreen rendering
        for (NSUInteger i = 0; i < (NSUInteger)atts->num_color_views; i++) {
            const _sg_view_t* color_view = atts->color_views[i];
            SOKOL_ASSERT(color_view);
            const _sg_view_t* resolve_view = atts->resolve_views[i];
            const _sg_image_t* color_img = _sg_image_ref_ptr(&color_view->cmn.img.ref);
            SOKOL_ASSERT(color_img->slot.state == SG_RESOURCESTATE_VALID);
            SOKOL_ASSERT(color_img->cmn.active_slot == 0);
            SOKOL_ASSERT(color_img->mtl.tex[0] != _SG_MTL_INVALID_SLOT_INDEX);
            pass_desc.colorAttachments[i].loadAction = _sg_mtl_load_action(action->colors[i].load_action);
            pass_desc.colorAttachments[i].storeAction = _sg_mtl_store_action(action->colors[i].store_action, resolve_view != 0);
            sg_color c = action->colors[i].clear_value;
            pass_desc.colorAttachments[i].clearColor = MTLClearColorMake(c.r, c.g, c.b, c.a);
            pass_desc.colorAttachments[i].texture = _sg_mtl_id(color_img->mtl.tex[0]);
            pass_desc.colorAttachments[i].level = (NSUInteger)color_view->cmn.img.mip_level;
            switch (color_img->cmn.type) {
                case SG_IMAGETYPE_CUBE:
                case SG_IMAGETYPE_ARRAY:
                    pass_desc.colorAttachments[i].slice = (NSUInteger)color_view->cmn.img.slice;
                    break;
                case SG_IMAGETYPE_3D:
                    pass_desc.colorAttachments[i].depthPlane = (NSUInteger)color_view->cmn.img.slice;
                    break;
                default: break;
            }
            if (resolve_view) {
                const _sg_image_t* resolve_img = _sg_image_ref_ptr(&resolve_view->cmn.img.ref);
                SOKOL_ASSERT(resolve_img->slot.state == SG_RESOURCESTATE_VALID);
                SOKOL_ASSERT(resolve_img->cmn.active_slot == 0);
                SOKOL_ASSERT(resolve_img->mtl.tex[0] != _SG_MTL_INVALID_SLOT_INDEX);
                pass_desc.colorAttachments[i].resolveTexture = _sg_mtl_id(resolve_img->mtl.tex[0]);
                pass_desc.colorAttachments[i].resolveLevel = (NSUInteger)resolve_view->cmn.img.mip_level;
                switch (resolve_img->cmn.type) {
                    case SG_IMAGETYPE_CUBE:
                    case SG_IMAGETYPE_ARRAY:
                        pass_desc.colorAttachments[i].resolveSlice = (NSUInteger)resolve_view->cmn.img.slice;
                        break;
                    case SG_IMAGETYPE_3D:
                        pass_desc.colorAttachments[i].resolveDepthPlane = (NSUInteger)resolve_view->cmn.img.slice;
                        break;
                    default: break;
                }
            }
        }
        if (atts->ds_view) {
            const _sg_view_t* ds_view = atts->ds_view;
            const _sg_image_t* ds_img = _sg_image_ref_ptr(&ds_view->cmn.img.ref);
            SOKOL_ASSERT(ds_img->slot.state == SG_RESOURCESTATE_VALID);
            SOKOL_ASSERT(ds_img->cmn.active_slot == 0);
            SOKOL_ASSERT(ds_img->mtl.tex[0] != _SG_MTL_INVALID_SLOT_INDEX);
            pass_desc.depthAttachment.texture = _sg_mtl_id(ds_img->mtl.tex[0]);
            pass_desc.depthAttachment.loadAction = _sg_mtl_load_action(action->depth.load_action);
            pass_desc.depthAttachment.storeAction = _sg_mtl_store_action(action->depth.store_action, false);
            pass_desc.depthAttachment.clearDepth = action->depth.clear_value;
            switch (ds_img->cmn.type) {
                case SG_IMAGETYPE_CUBE:
                case SG_IMAGETYPE_ARRAY:
                    pass_desc.depthAttachment.slice = (NSUInteger)ds_view->cmn.img.slice;
                    break;
                case SG_IMAGETYPE_3D:
                    pass_desc.depthAttachment.depthPlane = (NSUInteger)ds_view->cmn.img.slice;
                    break;
                default: break;
            }
            if (_sg_is_depth_stencil_format(ds_img->cmn.pixel_format)) {
                pass_desc.stencilAttachment.texture = _sg_mtl_id(ds_img->mtl.tex[0]);
                pass_desc.stencilAttachment.loadAction = _sg_mtl_load_action(action->stencil.load_action);
                pass_desc.stencilAttachment.storeAction = _sg_mtl_store_action(action->stencil.store_action, false);
                pass_desc.stencilAttachment.clearStencil = action->stencil.clear_value;
                switch (ds_img->cmn.type) {
                    case SG_IMAGETYPE_CUBE:
                    case SG_IMAGETYPE_ARRAY:
                        pass_desc.stencilAttachment.slice = (NSUInteger)ds_view->cmn.img.slice;
                        break;
                    case SG_IMAGETYPE_3D:
                        pass_desc.stencilAttachment.depthPlane = (NSUInteger)ds_view->cmn.img.slice;
                        break;
                    default: break;
                }
            }
        }
    } else {
        // setup pass descriptor for swapchain rendering
        //
        // NOTE: an MTKView's current_drawable used to be invalid while the
        // window is minimized (so no rendering should happen in that case),
        // but at least since macOS Sonoma the current drawable appears to be
        // valid even in a minimized window
        if (0 == swapchain->metal.current_drawable) {
            _sg.cur_pass.valid = false;
            return;
        }
        // pin the swapchain resources into memory so that they outlive their command buffer
        // (this is necessary because the command buffer doesn't retain references)
        int pass_desc_ref = _sg_mtl_add_resource(pass_desc);
        _sg_mtl_release_resource(_sg.frame_index, pass_desc_ref);
        _sg.mtl.cur_drawable = (__bridge id<CAMetalDrawable>) swapchain->metal.current_drawable;
        if (swapchain->sample_count > 1) {
            // multi-sampling: render into msaa texture, resolve into drawable texture
            id<MTLTexture> msaa_tex = (__bridge id<MTLTexture>) swapchain->metal.msaa_color_texture;
            SOKOL_ASSERT(msaa_tex != nil);
            pass_desc.colorAttachments[0].texture = msaa_tex;
            pass_desc.colorAttachments[0].resolveTexture = _sg.mtl.cur_drawable.texture;
            pass_desc.colorAttachments[0].storeAction = MTLStoreActionMultisampleResolve;
        } else {
            // non-msaa: render into current_drawable
            pass_desc.colorAttachments[0].texture = _sg.mtl.cur_drawable.texture;
            pass_desc.colorAttachments[0].storeAction = MTLStoreActionStore;
        }
        pass_desc.colorAttachments[0].loadAction = _sg_mtl_load_action(action->colors[0].load_action);
        const sg_color c = action->colors[0].clear_value;
        pass_desc.colorAttachments[0].clearColor = MTLClearColorMake(c.r, c.g, c.b, c.a);
        // optional depth-stencil texture
        if (swapchain->metal.depth_stencil_texture) {
            id<MTLTexture> ds_tex = (__bridge id<MTLTexture>) swapchain->metal.depth_stencil_texture;
            SOKOL_ASSERT(ds_tex != nil);
            pass_desc.depthAttachment.texture = ds_tex;
            pass_desc.depthAttachment.storeAction = MTLStoreActionDontCare;
            pass_desc.depthAttachment.loadAction = _sg_mtl_load_action(action->depth.load_action);
            pass_desc.depthAttachment.clearDepth = action->depth.clear_value;
            if (_sg_is_depth_stencil_format(swapchain->depth_format)) {
                pass_desc.stencilAttachment.texture = ds_tex;
                pass_desc.stencilAttachment.storeAction = MTLStoreActionDontCare;
                pass_desc.stencilAttachment.loadAction = _sg_mtl_load_action(action->stencil.load_action);
                pass_desc.stencilAttachment.clearStencil = action->stencil.clear_value;
            }
        }
    }
    // create a render command encoder; this used to return nil while the
    // window is minimized, but at least since macOS Sonoma a valid render
    // command encoder appears to be returned even in a minimized window
    _sg.mtl.render_cmd_encoder = [_sg.mtl.cmd_buffer renderCommandEncoderWithDescriptor:pass_desc];
    if (nil == _sg.mtl.render_cmd_encoder) {
        _sg.cur_pass.valid = false;
        return;
    }
    #if defined(SOKOL_DEBUG)
    if (pass->label) {
        _sg.mtl.render_cmd_encoder.label = [NSString stringWithUTF8String:pass->label];
    }
    #endif
}
_SOKOL_PRIVATE void _sg_mtl_begin_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(pass && atts);
    SOKOL_ASSERT(_sg.mtl.cmd_queue);
    SOKOL_ASSERT(nil == _sg.mtl.compute_cmd_encoder);
    SOKOL_ASSERT(nil == _sg.mtl.render_cmd_encoder);
    SOKOL_ASSERT(nil == _sg.mtl.cur_drawable);
    _sg_mtl_clear_state_cache();
    // if this is the first pass in the frame, create one command buffer for the entire frame
    if (nil == _sg.mtl.cmd_buffer) {
        // block until the oldest frame in flight has finished
        dispatch_semaphore_wait(_sg.mtl.sem, DISPATCH_TIME_FOREVER);
        if (_sg.desc.metal.use_command_buffer_with_retained_references) {
            _sg.mtl.cmd_buffer = [_sg.mtl.cmd_queue commandBuffer];
        } else {
            _sg.mtl.cmd_buffer = [_sg.mtl.cmd_queue commandBufferWithUnretainedReferences];
        }
        [_sg.mtl.cmd_buffer enqueue];
        [_sg.mtl.cmd_buffer addCompletedHandler:^(id<MTLCommandBuffer> cmd_buf) {
            // NOTE: this code is called on a different thread!
            _SOKOL_UNUSED(cmd_buf);
            dispatch_semaphore_signal(_sg.mtl.sem);
        }];
    }
    // if this is the first pass in the frame, get the uniform buffer base pointer
    if (0 == _sg.mtl.cur_ub_base_ptr) {
        _sg.mtl.cur_ub_base_ptr = (uint8_t*)[_sg.mtl.uniform_buffers[_sg.mtl.cur_frame_rotate_index] contents];
    }
    if (pass->compute) {
        _sg_mtl_begin_compute_pass(pass);
    } else {
        _sg_mtl_begin_render_pass(pass, atts);
    }
    // bind uniform buffers, those bindings remain valid for the entire pass
    if (_sg.cur_pass.valid) {
        _sg_mtl_bind_uniform_buffers();
    }
}
_SOKOL_PRIVATE void _sg_mtl_end_pass(const _sg_attachments_ptrs_t* atts) {
    _SOKOL_UNUSED(atts);
    if (nil != _sg.mtl.render_cmd_encoder) {
        [_sg.mtl.render_cmd_encoder endEncoding];
        // NOTE: MTLRenderCommandEncoder is autoreleased
        _sg.mtl.render_cmd_encoder = nil;
    }
    if (nil != _sg.mtl.compute_cmd_encoder) {
        [_sg.mtl.compute_cmd_encoder endEncoding];
        // NOTE: MTLComputeCommandEncoder is autoreleased
        _sg.mtl.compute_cmd_encoder = nil;
    }
    // if this is a swapchain pass, present the drawable
    if (nil != _sg.mtl.cur_drawable) {
        [_sg.mtl.cmd_buffer presentDrawable:_sg.mtl.cur_drawable];
        _sg.mtl.cur_drawable = nil;
    }
}
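// NOTE: each frame is recorded into a single command buffer which is committed
// in _sg_mtl_commit(); up to SG_NUM_INFLIGHT_FRAMES frames can be in flight on
// the GPU at once, throttled by the semaphore that is waited on in
// _sg_mtl_begin_pass() and signalled in the command buffer's completion handler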
_SOKOL_PRIVATE void _sg_mtl_commit(void) {
    SOKOL_ASSERT(nil == _sg.mtl.render_cmd_encoder);
    SOKOL_ASSERT(nil == _sg.mtl.compute_cmd_encoder);
    SOKOL_ASSERT(nil != _sg.mtl.cmd_buffer);
    // commit the frame's command buffer
    [_sg.mtl.cmd_buffer commit];
    // garbage-collect resources pending for release
    _sg_mtl_garbage_collect(_sg.frame_index);
    // rotate uniform buffer slot
    if (++_sg.mtl.cur_frame_rotate_index >= SG_NUM_INFLIGHT_FRAMES) {
        _sg.mtl.cur_frame_rotate_index = 0;
    }
    _sg.mtl.cur_ub_offset = 0;
    _sg.mtl.cur_ub_base_ptr = 0;
    // NOTE: MTLCommandBuffer is autoreleased
    _sg.mtl.cmd_buffer = nil;
}
_SOKOL_PRIVATE void _sg_mtl_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
    SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
    SOKOL_ASSERT(_sg.cur_pass.dim.height > 0);
    MTLViewport vp;
    vp.originX = (double) x;
    vp.originY = (double) (origin_top_left ? y : (_sg.cur_pass.dim.height - (y + h)));
    vp.width = (double) w;
    vp.height = (double) h;
    vp.znear = 0.0;
    vp.zfar = 1.0;
    [_sg.mtl.render_cmd_encoder setViewport:vp];
}
_SOKOL_PRIVATE void _sg_mtl_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
    SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
    SOKOL_ASSERT(_sg.cur_pass.dim.width > 0);
    SOKOL_ASSERT(_sg.cur_pass.dim.height > 0);
    // clip against framebuffer rect
    const _sg_recti_t clip = _sg_clipi(x, y, w, h, _sg.cur_pass.dim.width, _sg.cur_pass.dim.height);
    MTLScissorRect r;
    r.x = (NSUInteger)clip.x;
    r.y = (NSUInteger) (origin_top_left ? clip.y : (_sg.cur_pass.dim.height - (clip.y + clip.h)));
    r.width = (NSUInteger)clip.w;
    r.height = (NSUInteger)clip.h;
    [_sg.mtl.render_cmd_encoder setScissorRect:r];
}
_SOKOL_PRIVATE void _sg_mtl_apply_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    if (!_sg_sref_slot_eql(&_sg.mtl.cache.cur_pip, &pip->slot)) {
        _sg.mtl.cache.cur_pip = _sg_sref(&pip->slot);
        if (pip->cmn.is_compute) {
            SOKOL_ASSERT(_sg.cur_pass.is_compute);
            SOKOL_ASSERT(nil != _sg.mtl.compute_cmd_encoder);
            SOKOL_ASSERT(pip->mtl.cps != _SG_MTL_INVALID_SLOT_INDEX);
            [_sg.mtl.compute_cmd_encoder setComputePipelineState:_sg_mtl_id(pip->mtl.cps)];
        } else {
            SOKOL_ASSERT(!_sg.cur_pass.is_compute);
            SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
            sg_color c = pip->cmn.blend_color;
            [_sg.mtl.render_cmd_encoder setBlendColorRed:c.r green:c.g blue:c.b alpha:c.a];
            _sg_stats_inc(metal.pipeline.num_set_blend_color);
            [_sg.mtl.render_cmd_encoder setCullMode:pip->mtl.cull_mode];
            _sg_stats_inc(metal.pipeline.num_set_cull_mode);
            [_sg.mtl.render_cmd_encoder setFrontFacingWinding:pip->mtl.winding];
            _sg_stats_inc(metal.pipeline.num_set_front_facing_winding);
            [_sg.mtl.render_cmd_encoder setStencilReferenceValue:pip->mtl.stencil_ref];
            _sg_stats_inc(metal.pipeline.num_set_stencil_reference_value);
            [_sg.mtl.render_cmd_encoder setDepthBias:pip->cmn.depth.bias slopeScale:pip->cmn.depth.bias_slope_scale clamp:pip->cmn.depth.bias_clamp];
            _sg_stats_inc(metal.pipeline.num_set_depth_bias);
            SOKOL_ASSERT(pip->mtl.rps != _SG_MTL_INVALID_SLOT_INDEX);
            [_sg.mtl.render_cmd_encoder setRenderPipelineState:_sg_mtl_id(pip->mtl.rps)];
            _sg_stats_inc(metal.pipeline.num_set_render_pipeline_state);
            SOKOL_ASSERT(pip->mtl.dss != _SG_MTL_INVALID_SLOT_INDEX);
            [_sg.mtl.render_cmd_encoder setDepthStencilState:_sg_mtl_id(pip->mtl.dss)];
            _sg_stats_inc(metal.pipeline.num_set_depth_stencil_state);
        }
    }
}
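// the cache-compare helpers return a bitmask of _SG_MTL_CACHE_CMP_* flags for
// the binding properties that differ from the cached state; this lets callers
// detect the case where only the buffer offset changed
// ((cmp & ~_SG_MTL_CACHE_CMP_OFFSET) == 0) and use the cheaper
// set*BufferOffset calls instead of rebinding the whole buffer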
_SOKOL_PRIVATE int _sg_mtl_cache_buf_cmp(const _sg_mtl_cache_buf_t* item, const _sg_slot_t* slot, int active_slot, int offset) {
    int res = _SG_MTL_CACHE_CMP_EQUAL;
    if (!_sg_sref_slot_eql(&item->sref, slot)) {
        res |= _SG_MTL_CACHE_CMP_SREF;
    }
    if (item->active_slot != active_slot) {
        res |= _SG_MTL_CACHE_CMP_ACTIVESLOT;
    }
    if (item->offset != offset) {
        res |= _SG_MTL_CACHE_CMP_OFFSET;
    }
    return res;
}
_SOKOL_PRIVATE void _sg_mtl_cache_buf_upd(_sg_mtl_cache_buf_t* item, const _sg_slot_t* slot, int active_slot, int offset) {
    item->sref = _sg_sref(slot);
    item->offset = offset;
    item->active_slot = active_slot;
}
_SOKOL_PRIVATE int _sg_mtl_cache_tex_cmp(const _sg_mtl_cache_tex_t* item, const _sg_slot_t* slot, int active_slot) {
    int res = _SG_MTL_CACHE_CMP_EQUAL;
    if (!_sg_sref_slot_eql(&item->sref, slot)) {
        res |= _SG_MTL_CACHE_CMP_SREF;
    }
    if (item->active_slot != active_slot) {
        res |= _SG_MTL_CACHE_CMP_ACTIVESLOT;
    }
    return res;
}
_SOKOL_PRIVATE void _sg_mtl_cache_tex_upd(_sg_mtl_cache_tex_t* item, const _sg_slot_t* slot, int active_slot) {
    item->sref = _sg_sref(slot);
    item->active_slot = active_slot;
}
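// apply resource bindings to the currently active render- or compute-command-encoder,
// using the Metal state cache to filter out redundant bindings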
_SOKOL_PRIVATE bool _sg_mtl_apply_bindings(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(bnd);
    SOKOL_ASSERT(bnd->pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd->pip->cmn.shader);
    // don't set vertex- and index-buffers in compute passes
    if (!_sg.cur_pass.is_compute) {
        SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
        // store index buffer binding, this will be needed later in sg_draw()
        _sg.mtl.cache.cur_ibuf = _sg_buffer_ref(bnd->ib);
        _sg.mtl.cache.cur_ibuf_offset = bnd->ib_offset;
        if (bnd->ib) {
            SOKOL_ASSERT(bnd->pip->cmn.index_type != SG_INDEXTYPE_NONE);
        } else {
            SOKOL_ASSERT(bnd->pip->cmn.index_type == SG_INDEXTYPE_NONE);
        }
        // apply vertex buffers
        for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
            const _sg_buffer_t* vb = bnd->vbs[i];
            if (vb == 0) {
                continue;
            }
            const NSUInteger mtl_slot = _sg_mtl_vertexbuffer_bindslot(i);
            SOKOL_ASSERT(mtl_slot < _SG_MTL_MAX_STAGE_BUFFER_BINDINGS);
            const int active_slot = vb->cmn.active_slot;
            SOKOL_ASSERT(vb->mtl.buf[active_slot] != _SG_MTL_INVALID_SLOT_INDEX);
            const int offset = bnd->vb_offsets[i];
            _sg_mtl_cache_buf_t* cache_item = &_sg.mtl.cache.cur_vsbufs[i];
            const int cmp = _sg_mtl_cache_buf_cmp(cache_item, &vb->slot, active_slot, offset);
            if (cmp != _SG_MTL_CACHE_CMP_EQUAL) {
                _sg_mtl_cache_buf_upd(cache_item, &vb->slot, active_slot, offset);
                if (0 == (cmp & ~_SG_MTL_CACHE_CMP_OFFSET)) {
                    // only vertex buffer offset has changed
                    [_sg.mtl.render_cmd_encoder setVertexBufferOffset:(NSUInteger)offset atIndex:mtl_slot];
                    _sg_stats_inc(metal.bindings.num_set_vertex_buffer_offset);
                } else {
                    [_sg.mtl.render_cmd_encoder setVertexBuffer:_sg_mtl_id(vb->mtl.buf[active_slot]) offset:(NSUInteger)offset atIndex:mtl_slot];
                    _sg_stats_inc(metal.bindings.num_set_vertex_buffer);
                }
            } else {
                _sg_stats_inc(metal.bindings.num_skip_redundant_vertex_buffer);
            }
        }
    }
    // apply view bindings (textures, storage images, storage buffers)
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const _sg_view_t* view = bnd->views[i];
        if (0 == view) {
            continue;
        }
        const _sg_shader_view_t* shd_view = &shd->cmn.views[i];
        const sg_shader_stage stage = shd_view->stage;
        SOKOL_ASSERT((stage == SG_SHADERSTAGE_VERTEX)
            || (stage == SG_SHADERSTAGE_FRAGMENT)
            || (stage == SG_SHADERSTAGE_COMPUTE));
        SOKOL_ASSERT((shd_view->view_type == SG_VIEWTYPE_TEXTURE)
            || (shd_view->view_type == SG_VIEWTYPE_STORAGEBUFFER)
            || (shd_view->view_type == SG_VIEWTYPE_STORAGEIMAGE));
        const NSUInteger mtl_slot = shd->mtl.view_buffer_texture_n[i];
        // same handling for textures and storage images
        if ((shd_view->view_type == SG_VIEWTYPE_TEXTURE) || (shd_view->view_type == SG_VIEWTYPE_STORAGEIMAGE)) {
            SOKOL_ASSERT(mtl_slot < _SG_MTL_MAX_STAGE_TEXTURE_BINDINGS);
            const int active_slot = _sg_image_ref_ptr(&view->cmn.img.ref)->cmn.active_slot;
            SOKOL_ASSERT(view->mtl.tex_view[active_slot] != _SG_MTL_INVALID_SLOT_INDEX);
            if (stage == SG_SHADERSTAGE_VERTEX) {
                SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
                _sg_mtl_cache_tex_t* cache_item = &_sg.mtl.cache.cur_vstexs[mtl_slot];
                const int cmp = _sg_mtl_cache_tex_cmp(cache_item, &view->slot, active_slot);
                if (cmp != _SG_MTL_CACHE_CMP_EQUAL) {
                    _sg_mtl_cache_tex_upd(cache_item, &view->slot, active_slot);
                    [_sg.mtl.render_cmd_encoder setVertexTexture:_sg_mtl_id(view->mtl.tex_view[active_slot]) atIndex:mtl_slot];
                    _sg_stats_inc(metal.bindings.num_set_vertex_texture);
                } else {
                    _sg_stats_inc(metal.bindings.num_skip_redundant_vertex_texture);
                }
            } else if (stage == SG_SHADERSTAGE_FRAGMENT) {
                SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
                _sg_mtl_cache_tex_t* cache_item = &_sg.mtl.cache.cur_fstexs[mtl_slot];
                const int cmp = _sg_mtl_cache_tex_cmp(cache_item, &view->slot, active_slot);
                if (cmp != _SG_MTL_CACHE_CMP_EQUAL) {
                    _sg_mtl_cache_tex_upd(cache_item, &view->slot, active_slot);
                    [_sg.mtl.render_cmd_encoder setFragmentTexture:_sg_mtl_id(view->mtl.tex_view[active_slot]) atIndex:mtl_slot];
                    _sg_stats_inc(metal.bindings.num_set_fragment_texture);
                } else {
                    _sg_stats_inc(metal.bindings.num_skip_redundant_fragment_texture);
                }
            } else if (stage == SG_SHADERSTAGE_COMPUTE) {
                SOKOL_ASSERT(nil != _sg.mtl.compute_cmd_encoder);
                _sg_mtl_cache_tex_t* cache_item = &_sg.mtl.cache.cur_cstexs[mtl_slot];
                const int cmp = _sg_mtl_cache_tex_cmp(cache_item, &view->slot, active_slot);
                if (cmp != _SG_MTL_CACHE_CMP_EQUAL) {
                    _sg_mtl_cache_tex_upd(cache_item, &view->slot, active_slot);
                    [_sg.mtl.compute_cmd_encoder setTexture:_sg_mtl_id(view->mtl.tex_view[active_slot]) atIndex:mtl_slot];
                    _sg_stats_inc(metal.bindings.num_set_compute_texture);
                } else {
                    _sg_stats_inc(metal.bindings.num_skip_redundant_compute_texture);
                }
            } else SOKOL_UNREACHABLE;
        } else if (shd_view->view_type == SG_VIEWTYPE_STORAGEBUFFER) {
            SOKOL_ASSERT(mtl_slot < _SG_MTL_MAX_STAGE_UB_SBUF_BINDINGS);
            const _sg_buffer_t* sbuf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
            const int active_slot = sbuf->cmn.active_slot;
            SOKOL_ASSERT(sbuf->mtl.buf[active_slot] != _SG_MTL_INVALID_SLOT_INDEX);
            const int offset = view->cmn.buf.offset;
            if (stage == SG_SHADERSTAGE_VERTEX) {
                SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
                _sg_mtl_cache_buf_t* cache_item = &_sg.mtl.cache.cur_vsbufs[mtl_slot];
                const int cmp = _sg_mtl_cache_buf_cmp(cache_item, &sbuf->slot, active_slot, offset);
                if (cmp != _SG_MTL_CACHE_CMP_EQUAL) {
                    _sg_mtl_cache_buf_upd(cache_item, &sbuf->slot, active_slot, offset);
                    if (0 == (cmp & ~_SG_MTL_CACHE_CMP_OFFSET)) {
                        // only offset has changed
                        [_sg.mtl.render_cmd_encoder setVertexBufferOffset:(NSUInteger)offset atIndex:mtl_slot];
                        _sg_stats_inc(metal.bindings.num_set_vertex_buffer_offset);
                    } else {
                        [_sg.mtl.render_cmd_encoder setVertexBuffer:_sg_mtl_id(sbuf->mtl.buf[active_slot]) offset:(NSUInteger)offset atIndex:mtl_slot];
                        _sg_stats_inc(metal.bindings.num_set_vertex_buffer);
                    }
                } else {
                    _sg_stats_inc(metal.bindings.num_skip_redundant_vertex_buffer);
                }
            } else if (stage == SG_SHADERSTAGE_FRAGMENT) {
                SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
                _sg_mtl_cache_buf_t* cache_item = &_sg.mtl.cache.cur_fsbufs[mtl_slot];
                const int cmp = _sg_mtl_cache_buf_cmp(cache_item, &sbuf->slot, active_slot, offset);
                if (cmp != _SG_MTL_CACHE_CMP_EQUAL) {
                    _sg_mtl_cache_buf_upd(cache_item, &sbuf->slot, active_slot, offset);
                    if (0 == (cmp & ~_SG_MTL_CACHE_CMP_OFFSET)) {
                        // only offset has changed
                        [_sg.mtl.render_cmd_encoder setFragmentBufferOffset:(NSUInteger)offset atIndex:mtl_slot];
                        _sg_stats_inc(metal.bindings.num_set_fragment_buffer_offset);
                    } else {
                        [_sg.mtl.render_cmd_encoder setFragmentBuffer:_sg_mtl_id(sbuf->mtl.buf[active_slot]) offset:(NSUInteger)offset atIndex:mtl_slot];
                        _sg_stats_inc(metal.bindings.num_set_fragment_buffer);
                    }
                } else {
                    _sg_stats_inc(metal.bindings.num_skip_redundant_fragment_buffer);
                }
            } else if (stage == SG_SHADERSTAGE_COMPUTE) {
                SOKOL_ASSERT(nil != _sg.mtl.compute_cmd_encoder);
                _sg_mtl_cache_buf_t* cache_item = &_sg.mtl.cache.cur_csbufs[mtl_slot];
                const int cmp = _sg_mtl_cache_buf_cmp(cache_item, &sbuf->slot, active_slot, offset);
                if (cmp != _SG_MTL_CACHE_CMP_EQUAL) {
                    _sg_mtl_cache_buf_upd(cache_item, &sbuf->slot, active_slot, offset);
                    if (0 == (cmp & ~_SG_MTL_CACHE_CMP_OFFSET)) {
                        // only offset has changed
                        [_sg.mtl.compute_cmd_encoder setBufferOffset:(NSUInteger)offset atIndex:mtl_slot];
                        _sg_stats_inc(metal.bindings.num_set_compute_buffer_offset);
                    } else {
                        [_sg.mtl.compute_cmd_encoder setBuffer:_sg_mtl_id(sbuf->mtl.buf[active_slot]) offset:(NSUInteger)offset atIndex:mtl_slot];
                        _sg_stats_inc(metal.bindings.num_set_compute_buffer);
                    }
                } else {
                    _sg_stats_inc(metal.bindings.num_skip_redundant_compute_buffer);
                }
            }
        } else SOKOL_UNREACHABLE;
    }
    // apply sampler bindings
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        const _sg_sampler_t* smp = bnd->smps[i];
        if (smp == 0) {
            continue;
        }
        SOKOL_ASSERT(smp->mtl.sampler_state != _SG_MTL_INVALID_SLOT_INDEX);
        const sg_shader_stage stage = shd->cmn.samplers[i].stage;
        SOKOL_ASSERT((stage == SG_SHADERSTAGE_VERTEX) || (stage == SG_SHADERSTAGE_FRAGMENT) || (stage == SG_SHADERSTAGE_COMPUTE));
        const NSUInteger mtl_slot = shd->mtl.smp_sampler_n[i];
        SOKOL_ASSERT(mtl_slot < _SG_MTL_MAX_STAGE_SAMPLER_BINDINGS);
        if (stage == SG_SHADERSTAGE_VERTEX) {
            SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
            if (!_sg_sref_slot_eql(&_sg.mtl.cache.cur_vssmps[mtl_slot], &smp->slot)) {
                _sg.mtl.cache.cur_vssmps[mtl_slot] = _sg_sref(&smp->slot);
                [_sg.mtl.render_cmd_encoder setVertexSamplerState:_sg_mtl_id(smp->mtl.sampler_state) atIndex:mtl_slot];
                _sg_stats_inc(metal.bindings.num_set_vertex_sampler_state);
            } else {
                _sg_stats_inc(metal.bindings.num_skip_redundant_vertex_sampler_state);
            }
        } else if (stage == SG_SHADERSTAGE_FRAGMENT) {
            SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
            if (!_sg_sref_slot_eql(&_sg.mtl.cache.cur_fssmps[mtl_slot], &smp->slot)) {
                _sg.mtl.cache.cur_fssmps[mtl_slot] = _sg_sref(&smp->slot);
                [_sg.mtl.render_cmd_encoder setFragmentSamplerState:_sg_mtl_id(smp->mtl.sampler_state) atIndex:mtl_slot];
                _sg_stats_inc(metal.bindings.num_set_fragment_sampler_state);
            } else {
                _sg_stats_inc(metal.bindings.num_skip_redundant_fragment_sampler_state);
            }
        } else if (stage == SG_SHADERSTAGE_COMPUTE) {
            SOKOL_ASSERT(nil != _sg.mtl.compute_cmd_encoder);
            if (!_sg_sref_slot_eql(&_sg.mtl.cache.cur_cssmps[mtl_slot], &smp->slot)) {
                _sg.mtl.cache.cur_cssmps[mtl_slot] = _sg_sref(&smp->slot);
                [_sg.mtl.compute_cmd_encoder setSamplerState:_sg_mtl_id(smp->mtl.sampler_state) atIndex:mtl_slot];
                _sg_stats_inc(metal.bindings.num_set_compute_sampler_state);
            } else {
                _sg_stats_inc(metal.bindings.num_skip_redundant_compute_sampler_state);
            }
        } else SOKOL_UNREACHABLE;
    }
    return true;
}
_SOKOL_PRIVATE void _sg_mtl_apply_uniforms(int ub_slot, const sg_range* data) {
    SOKOL_ASSERT((ub_slot >= 0) && (ub_slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS));
    SOKOL_ASSERT(((size_t)_sg.mtl.cur_ub_offset + data->size) <= (size_t)_sg.mtl.ub_size);
    SOKOL_ASSERT((_sg.mtl.cur_ub_offset & (_SG_MTL_UB_ALIGN-1)) == 0);
    const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    SOKOL_ASSERT(pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    SOKOL_ASSERT(data->size == shd->cmn.uniform_blocks[ub_slot].size);
    const sg_shader_stage stage = shd->cmn.uniform_blocks[ub_slot].stage;
    const NSUInteger mtl_slot = shd->mtl.ub_buffer_n[ub_slot];
    // copy to global uniform buffer, record offset into cmd encoder, and advance offset
    uint8_t* dst = &_sg.mtl.cur_ub_base_ptr[_sg.mtl.cur_ub_offset];
    memcpy(dst, data->ptr, data->size);
    if (stage == SG_SHADERSTAGE_VERTEX) {
        SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
        [_sg.mtl.render_cmd_encoder setVertexBufferOffset:(NSUInteger)_sg.mtl.cur_ub_offset atIndex:mtl_slot];
        _sg_stats_inc(metal.uniforms.num_set_vertex_buffer_offset);
    } else if (stage == SG_SHADERSTAGE_FRAGMENT) {
        SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
        [_sg.mtl.render_cmd_encoder setFragmentBufferOffset:(NSUInteger)_sg.mtl.cur_ub_offset atIndex:mtl_slot];
        _sg_stats_inc(metal.uniforms.num_set_fragment_buffer_offset);
    } else if (stage == SG_SHADERSTAGE_COMPUTE) {
        SOKOL_ASSERT(nil != _sg.mtl.compute_cmd_encoder);
        [_sg.mtl.compute_cmd_encoder setBufferOffset:(NSUInteger)_sg.mtl.cur_ub_offset atIndex:mtl_slot];
        _sg_stats_inc(metal.uniforms.num_set_compute_buffer_offset);
    } else {
        SOKOL_UNREACHABLE;
    }
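    // advance the uniform buffer offset to the next alignment boundary for the
    // following sg_apply_uniforms() call (e.g. assuming a 256-byte _SG_MTL_UB_ALIGN,
    // an offset of 324 would be rounded up to 512)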
    _sg.mtl.cur_ub_offset = _sg_roundup(_sg.mtl.cur_ub_offset + (int)data->size, _SG_MTL_UB_ALIGN);
}
_SOKOL_PRIVATE void _sg_mtl_draw(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
    SOKOL_ASSERT(nil != _sg.mtl.render_cmd_encoder);
    const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    SOKOL_ASSERT(pip);
    if (_sg.use_indexed_draw) {
        // indexed rendering
        const _sg_buffer_t* ib = _sg_buffer_ref_ptr(&_sg.mtl.cache.cur_ibuf);
        SOKOL_ASSERT(ib && (ib->mtl.buf[ib->cmn.active_slot] != _SG_MTL_INVALID_SLOT_INDEX));
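        // the index buffer offset is in bytes: the offset provided in
        // sg_apply_bindings() plus the base element scaled by the index size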
        const NSUInteger index_buffer_offset = (NSUInteger) (_sg.mtl.cache.cur_ibuf_offset + base_element * pip->mtl.index_size);
        [_sg.mtl.render_cmd_encoder drawIndexedPrimitives:pip->mtl.prim_type
            indexCount:(NSUInteger)num_elements
            indexType:pip->mtl.index_type
            indexBuffer:_sg_mtl_id(ib->mtl.buf[ib->cmn.active_slot])
            indexBufferOffset:index_buffer_offset
            instanceCount:(NSUInteger)num_instances
            baseVertex:base_vertex
            baseInstance:(NSUInteger)base_instance];
    } else {
        // non-indexed rendering
        [_sg.mtl.render_cmd_encoder drawPrimitives:pip->mtl.prim_type
            vertexStart:(NSUInteger)base_element
            vertexCount:(NSUInteger)num_elements
            instanceCount:(NSUInteger)num_instances
            baseInstance:(NSUInteger)base_instance];
    }
}
_SOKOL_PRIVATE void _sg_mtl_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    SOKOL_ASSERT(nil != _sg.mtl.compute_cmd_encoder);
    const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    SOKOL_ASSERT(pip);
    const MTLSize thread_groups = MTLSizeMake(
        (NSUInteger)num_groups_x,
        (NSUInteger)num_groups_y,
        (NSUInteger)num_groups_z);
    const MTLSize threads_per_threadgroup = pip->mtl.threads_per_threadgroup;
    [_sg.mtl.compute_cmd_encoder dispatchThreadgroups:thread_groups threadsPerThreadgroup:threads_per_threadgroup];
}
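// NOTE: dynamically updated buffers and images are multi-slot resources
// (num_slots > 1), each update rotates to the next slot so that the CPU never
// writes to memory the GPU may still be reading in an in-flight frame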
_SOKOL_PRIVATE void _sg_mtl_update_buffer(_sg_buffer_t* buf, const sg_range* data) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    if (++buf->cmn.active_slot >= buf->cmn.num_slots) {
        buf->cmn.active_slot = 0;
    }
    __unsafe_unretained id<MTLBuffer> mtl_buf = _sg_mtl_id(buf->mtl.buf[buf->cmn.active_slot]);
    void* dst_ptr = [mtl_buf contents];
    memcpy(dst_ptr, data->ptr, data->size);
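    // with managed storage mode (macOS only), CPU-side writes must be
    // explicitly flushed to make them visible to the GPU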
    #if defined(_SG_TARGET_MACOS)
    if (_sg_mtl_resource_options_storage_mode_managed_or_shared() == MTLResourceStorageModeManaged) {
        [mtl_buf didModifyRange:NSMakeRange(0, data->size)];
    }
    #endif
}
_SOKOL_PRIVATE void _sg_mtl_append_buffer(_sg_buffer_t* buf, const sg_range* data, bool new_frame) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    if (new_frame) {
        if (++buf->cmn.active_slot >= buf->cmn.num_slots) {
            buf->cmn.active_slot = 0;
        }
    }
    __unsafe_unretained id<MTLBuffer> mtl_buf = _sg_mtl_id(buf->mtl.buf[buf->cmn.active_slot]);
    uint8_t* dst_ptr = (uint8_t*) [mtl_buf contents];
    dst_ptr += buf->cmn.append_pos;
    memcpy(dst_ptr, data->ptr, data->size);
    #if defined(_SG_TARGET_MACOS)
    if (_sg_mtl_resource_options_storage_mode_managed_or_shared() == MTLResourceStorageModeManaged) {
        [mtl_buf didModifyRange:NSMakeRange((NSUInteger)buf->cmn.append_pos, (NSUInteger)data->size)];
    }
    #endif
}
_SOKOL_PRIVATE void _sg_mtl_update_image(_sg_image_t* img, const sg_image_data* data) {
    SOKOL_ASSERT(img && data);
    if (++img->cmn.active_slot >= img->cmn.num_slots) {
        img->cmn.active_slot = 0;
    }
    __unsafe_unretained id<MTLTexture> mtl_tex = _sg_mtl_id(img->mtl.tex[img->cmn.active_slot]);
    _sg_mtl_copy_image_data(img, mtl_tex, data);
}
_SOKOL_PRIVATE void _sg_mtl_push_debug_group(const char* name) {
    SOKOL_ASSERT(name);
    if (_sg.mtl.render_cmd_encoder) {
        [_sg.mtl.render_cmd_encoder pushDebugGroup:[NSString stringWithUTF8String:name]];
    } else if (_sg.mtl.compute_cmd_encoder) {
        [_sg.mtl.compute_cmd_encoder pushDebugGroup:[NSString stringWithUTF8String:name]];
    }
}
_SOKOL_PRIVATE void _sg_mtl_pop_debug_group(void) {
    if (_sg.mtl.render_cmd_encoder) {
        [_sg.mtl.render_cmd_encoder popDebugGroup];
    } else if (_sg.mtl.compute_cmd_encoder) {
        [_sg.mtl.compute_cmd_encoder popDebugGroup];
    }
}
// ██ ██ ███████ ██████ ██████ ██████ ██ ██ ██████ █████ ██████ ██ ██ ███████ ███ ██ ██████
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██
// ██ █ ██ █████ ██████ ██ ███ ██████ ██ ██ ██████ ███████ ██ █████ █████ ██ ██ ██ ██ ██
// ██ ███ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ███ ███ ███████ ██████ ██████ ██ ██████ ██████ ██ ██ ██████ ██ ██ ███████ ██ ████ ██████
//
// >>webgpu
// >>wgpu
#elif defined(SOKOL_WGPU)
_SOKOL_PRIVATE WGPUStringView _sg_wgpu_stringview(const char* str) {
    WGPUStringView res;
    if (str) {
        res.data = str;
        res.length = strlen(str);
    } else {
        res.data = 0;
        res.length = 0;
    }
    return res;
}
_SOKOL_PRIVATE WGPUOptionalBool _sg_wgpu_optional_bool(bool b) {
    return b ? WGPUOptionalBool_True : WGPUOptionalBool_False;
}
_SOKOL_PRIVATE WGPUBufferUsage _sg_wgpu_buffer_usage(const sg_buffer_usage* usg) {
    int res = 0;
    if (usg->vertex_buffer) {
        res |= (int)WGPUBufferUsage_Vertex;
    }
    if (usg->index_buffer) {
        res |= (int)WGPUBufferUsage_Index;
    }
    if (usg->storage_buffer) {
        res |= (int)WGPUBufferUsage_Storage;
    }
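    // dynamic and stream buffers are updated by copying new data into the
    // buffer, which requires the CopyDst usage flag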
    if (!usg->immutable) {
        res |= (int)WGPUBufferUsage_CopyDst;
    }
    return (WGPUBufferUsage)res;
}
_SOKOL_PRIVATE WGPULoadOp _sg_wgpu_load_op(WGPUTextureView view, sg_load_action a) {
    if (0 == view) {
        return WGPULoadOp_Undefined;
    } else switch (a) {
        case SG_LOADACTION_CLEAR:
        case SG_LOADACTION_DONTCARE:
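            // NOTE: WebGPU has no 'dont-care' load op, fall back to clear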
            return WGPULoadOp_Clear;
        case SG_LOADACTION_LOAD:
            return WGPULoadOp_Load;
        default:
            SOKOL_UNREACHABLE;
            return WGPULoadOp_Force32;
    }
}
_SOKOL_PRIVATE WGPUStoreOp _sg_wgpu_store_op(WGPUTextureView view, sg_store_action a) {
    if (0 == view) {
        return WGPUStoreOp_Undefined;
    } else switch (a) {
        case SG_STOREACTION_STORE:
            return WGPUStoreOp_Store;
        case SG_STOREACTION_DONTCARE:
            return WGPUStoreOp_Discard;
        default:
            SOKOL_UNREACHABLE;
            return WGPUStoreOp_Force32;
    }
}
_SOKOL_PRIVATE WGPUTextureViewDimension _sg_wgpu_texture_view_dimension(sg_image_type t) {
    switch (t) {
        case SG_IMAGETYPE_2D: return WGPUTextureViewDimension_2D;
        case SG_IMAGETYPE_CUBE: return WGPUTextureViewDimension_Cube;
        case SG_IMAGETYPE_3D: return WGPUTextureViewDimension_3D;
        case SG_IMAGETYPE_ARRAY: return WGPUTextureViewDimension_2DArray;
        default: SOKOL_UNREACHABLE; return WGPUTextureViewDimension_Force32;
    }
}
_SOKOL_PRIVATE WGPUTextureViewDimension _sg_wgpu_attachment_view_dimension(sg_image_type t) {
    switch (t) {
        case SG_IMAGETYPE_2D: return WGPUTextureViewDimension_2D;
        case SG_IMAGETYPE_CUBE: return WGPUTextureViewDimension_2DArray; // not a bug
        case SG_IMAGETYPE_3D: return WGPUTextureViewDimension_2D; // not a bug
        case SG_IMAGETYPE_ARRAY: return WGPUTextureViewDimension_2DArray;
        default: SOKOL_UNREACHABLE; return WGPUTextureViewDimension_Force32;
    }
}
_SOKOL_PRIVATE WGPUTextureDimension _sg_wgpu_texture_dimension(sg_image_type t) {
    if (SG_IMAGETYPE_3D == t) {
        return WGPUTextureDimension_3D;
    } else {
        return WGPUTextureDimension_2D;
    }
}
_SOKOL_PRIVATE WGPUTextureSampleType _sg_wgpu_texture_sample_type(sg_image_sample_type t, bool msaa) {
    switch (t) {
        case SG_IMAGESAMPLETYPE_FLOAT: return msaa ? WGPUTextureSampleType_UnfilterableFloat : WGPUTextureSampleType_Float;
        case SG_IMAGESAMPLETYPE_DEPTH: return WGPUTextureSampleType_Depth;
        case SG_IMAGESAMPLETYPE_SINT: return WGPUTextureSampleType_Sint;
        case SG_IMAGESAMPLETYPE_UINT: return WGPUTextureSampleType_Uint;
        case SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT: return WGPUTextureSampleType_UnfilterableFloat;
        default: SOKOL_UNREACHABLE; return WGPUTextureSampleType_Force32;
    }
}
_SOKOL_PRIVATE WGPUSamplerBindingType _sg_wgpu_sampler_binding_type(sg_sampler_type t) {
    switch (t) {
        case SG_SAMPLERTYPE_FILTERING: return WGPUSamplerBindingType_Filtering;
        case SG_SAMPLERTYPE_COMPARISON: return WGPUSamplerBindingType_Comparison;
        case SG_SAMPLERTYPE_NONFILTERING: return WGPUSamplerBindingType_NonFiltering;
        default: SOKOL_UNREACHABLE; return WGPUSamplerBindingType_Force32;
    }
}
_SOKOL_PRIVATE WGPUAddressMode _sg_wgpu_sampler_address_mode(sg_wrap m) {
    switch (m) {
        case SG_WRAP_REPEAT:
            return WGPUAddressMode_Repeat;
        case SG_WRAP_CLAMP_TO_EDGE:
        case SG_WRAP_CLAMP_TO_BORDER:
            return WGPUAddressMode_ClampToEdge;
        case SG_WRAP_MIRRORED_REPEAT:
            return WGPUAddressMode_MirrorRepeat;
        default:
            SOKOL_UNREACHABLE;
            return WGPUAddressMode_Force32;
    }
}
_SOKOL_PRIVATE WGPUFilterMode _sg_wgpu_sampler_minmag_filter(sg_filter f) {
    switch (f) {
        case SG_FILTER_NEAREST:
            return WGPUFilterMode_Nearest;
        case SG_FILTER_LINEAR:
            return WGPUFilterMode_Linear;
        default:
            SOKOL_UNREACHABLE;
            return WGPUFilterMode_Force32;
    }
}
_SOKOL_PRIVATE WGPUMipmapFilterMode _sg_wgpu_sampler_mipmap_filter(sg_filter f) {
    switch (f) {
        case SG_FILTER_NEAREST:
            return WGPUMipmapFilterMode_Nearest;
        case SG_FILTER_LINEAR:
            return WGPUMipmapFilterMode_Linear;
        default:
            SOKOL_UNREACHABLE;
            return WGPUMipmapFilterMode_Force32;
    }
}
_SOKOL_PRIVATE WGPUIndexFormat _sg_wgpu_indexformat(sg_index_type t) {
    // NOTE: there's no WGPUIndexFormat_None
    return (t == SG_INDEXTYPE_UINT16) ? WGPUIndexFormat_Uint16 : WGPUIndexFormat_Uint32;
}
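// NOTE: WebGPU only allows a strip-index-format to be set for strip topologies,
// for list topologies it must remain undefined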
  15130. _SOKOL_PRIVATE WGPUIndexFormat _sg_wgpu_stripindexformat(sg_primitive_type prim_type, sg_index_type idx_type) {
  15131. if (idx_type == SG_INDEXTYPE_NONE) {
  15132. return WGPUIndexFormat_Undefined;
  15133. } else if ((prim_type == SG_PRIMITIVETYPE_LINE_STRIP) || (prim_type == SG_PRIMITIVETYPE_TRIANGLE_STRIP)) {
  15134. return _sg_wgpu_indexformat(idx_type);
  15135. } else {
  15136. return WGPUIndexFormat_Undefined;
  15137. }
  15138. }
  15139. _SOKOL_PRIVATE WGPUVertexStepMode _sg_wgpu_stepmode(sg_vertex_step s) {
  15140. return (s == SG_VERTEXSTEP_PER_VERTEX) ? WGPUVertexStepMode_Vertex : WGPUVertexStepMode_Instance;
  15141. }
  15142. _SOKOL_PRIVATE WGPUVertexFormat _sg_wgpu_vertexformat(sg_vertex_format f) {
  15143. switch (f) {
  15144. case SG_VERTEXFORMAT_FLOAT: return WGPUVertexFormat_Float32;
  15145. case SG_VERTEXFORMAT_FLOAT2: return WGPUVertexFormat_Float32x2;
  15146. case SG_VERTEXFORMAT_FLOAT3: return WGPUVertexFormat_Float32x3;
  15147. case SG_VERTEXFORMAT_FLOAT4: return WGPUVertexFormat_Float32x4;
  15148. case SG_VERTEXFORMAT_INT: return WGPUVertexFormat_Sint32;
  15149. case SG_VERTEXFORMAT_INT2: return WGPUVertexFormat_Sint32x2;
  15150. case SG_VERTEXFORMAT_INT3: return WGPUVertexFormat_Sint32x3;
  15151. case SG_VERTEXFORMAT_INT4: return WGPUVertexFormat_Sint32x4;
  15152. case SG_VERTEXFORMAT_UINT: return WGPUVertexFormat_Uint32;
  15153. case SG_VERTEXFORMAT_UINT2: return WGPUVertexFormat_Uint32x2;
  15154. case SG_VERTEXFORMAT_UINT3: return WGPUVertexFormat_Uint32x3;
  15155. case SG_VERTEXFORMAT_UINT4: return WGPUVertexFormat_Uint32x4;
  15156. case SG_VERTEXFORMAT_BYTE4: return WGPUVertexFormat_Sint8x4;
  15157. case SG_VERTEXFORMAT_BYTE4N: return WGPUVertexFormat_Snorm8x4;
  15158. case SG_VERTEXFORMAT_UBYTE4: return WGPUVertexFormat_Uint8x4;
  15159. case SG_VERTEXFORMAT_UBYTE4N: return WGPUVertexFormat_Unorm8x4;
  15160. case SG_VERTEXFORMAT_SHORT2: return WGPUVertexFormat_Sint16x2;
  15161. case SG_VERTEXFORMAT_SHORT2N: return WGPUVertexFormat_Snorm16x2;
  15162. case SG_VERTEXFORMAT_USHORT2: return WGPUVertexFormat_Uint16x2;
  15163. case SG_VERTEXFORMAT_USHORT2N: return WGPUVertexFormat_Unorm16x2;
  15164. case SG_VERTEXFORMAT_SHORT4: return WGPUVertexFormat_Sint16x4;
  15165. case SG_VERTEXFORMAT_SHORT4N: return WGPUVertexFormat_Snorm16x4;
  15166. case SG_VERTEXFORMAT_USHORT4: return WGPUVertexFormat_Uint16x4;
  15167. case SG_VERTEXFORMAT_USHORT4N: return WGPUVertexFormat_Unorm16x4;
  15168. case SG_VERTEXFORMAT_UINT10_N2: return WGPUVertexFormat_Unorm10_10_10_2;
  15169. case SG_VERTEXFORMAT_HALF2: return WGPUVertexFormat_Float16x2;
  15170. case SG_VERTEXFORMAT_HALF4: return WGPUVertexFormat_Float16x4;
  15171. default:
  15172. SOKOL_UNREACHABLE;
  15173. return WGPUVertexFormat_Force32;
  15174. }
  15175. }
  15176. _SOKOL_PRIVATE WGPUPrimitiveTopology _sg_wgpu_topology(sg_primitive_type t) {
  15177. switch (t) {
  15178. case SG_PRIMITIVETYPE_POINTS: return WGPUPrimitiveTopology_PointList;
  15179. case SG_PRIMITIVETYPE_LINES: return WGPUPrimitiveTopology_LineList;
  15180. case SG_PRIMITIVETYPE_LINE_STRIP: return WGPUPrimitiveTopology_LineStrip;
  15181. case SG_PRIMITIVETYPE_TRIANGLES: return WGPUPrimitiveTopology_TriangleList;
  15182. case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return WGPUPrimitiveTopology_TriangleStrip;
  15183. default:
  15184. SOKOL_UNREACHABLE;
  15185. return WGPUPrimitiveTopology_Force32;
  15186. }
  15187. }
  15188. _SOKOL_PRIVATE WGPUFrontFace _sg_wgpu_frontface(sg_face_winding fw) {
  15189. return (fw == SG_FACEWINDING_CCW) ? WGPUFrontFace_CCW : WGPUFrontFace_CW;
  15190. }
  15191. _SOKOL_PRIVATE WGPUCullMode _sg_wgpu_cullmode(sg_cull_mode cm) {
  15192. switch (cm) {
  15193. case SG_CULLMODE_NONE: return WGPUCullMode_None;
  15194. case SG_CULLMODE_FRONT: return WGPUCullMode_Front;
  15195. case SG_CULLMODE_BACK: return WGPUCullMode_Back;
  15196. default:
  15197. SOKOL_UNREACHABLE;
  15198. return WGPUCullMode_Force32;
  15199. }
  15200. }
  15201. _SOKOL_PRIVATE WGPUTextureFormat _sg_wgpu_textureformat(sg_pixel_format p) {
  15202. switch (p) {
  15203. case SG_PIXELFORMAT_NONE: return WGPUTextureFormat_Undefined;
  15204. case SG_PIXELFORMAT_R8: return WGPUTextureFormat_R8Unorm;
  15205. case SG_PIXELFORMAT_R8SN: return WGPUTextureFormat_R8Snorm;
  15206. case SG_PIXELFORMAT_R8UI: return WGPUTextureFormat_R8Uint;
  15207. case SG_PIXELFORMAT_R8SI: return WGPUTextureFormat_R8Sint;
  15208. case SG_PIXELFORMAT_R16UI: return WGPUTextureFormat_R16Uint;
  15209. case SG_PIXELFORMAT_R16SI: return WGPUTextureFormat_R16Sint;
  15210. case SG_PIXELFORMAT_R16F: return WGPUTextureFormat_R16Float;
  15211. case SG_PIXELFORMAT_RG8: return WGPUTextureFormat_RG8Unorm;
  15212. case SG_PIXELFORMAT_RG8SN: return WGPUTextureFormat_RG8Snorm;
  15213. case SG_PIXELFORMAT_RG8UI: return WGPUTextureFormat_RG8Uint;
  15214. case SG_PIXELFORMAT_RG8SI: return WGPUTextureFormat_RG8Sint;
  15215. case SG_PIXELFORMAT_R32UI: return WGPUTextureFormat_R32Uint;
  15216. case SG_PIXELFORMAT_R32SI: return WGPUTextureFormat_R32Sint;
  15217. case SG_PIXELFORMAT_R32F: return WGPUTextureFormat_R32Float;
  15218. case SG_PIXELFORMAT_RG16UI: return WGPUTextureFormat_RG16Uint;
  15219. case SG_PIXELFORMAT_RG16SI: return WGPUTextureFormat_RG16Sint;
  15220. case SG_PIXELFORMAT_RG16F: return WGPUTextureFormat_RG16Float;
  15221. case SG_PIXELFORMAT_RGBA8: return WGPUTextureFormat_RGBA8Unorm;
  15222. case SG_PIXELFORMAT_SRGB8A8: return WGPUTextureFormat_RGBA8UnormSrgb;
  15223. case SG_PIXELFORMAT_RGBA8SN: return WGPUTextureFormat_RGBA8Snorm;
  15224. case SG_PIXELFORMAT_RGBA8UI: return WGPUTextureFormat_RGBA8Uint;
  15225. case SG_PIXELFORMAT_RGBA8SI: return WGPUTextureFormat_RGBA8Sint;
  15226. case SG_PIXELFORMAT_BGRA8: return WGPUTextureFormat_BGRA8Unorm;
  15227. case SG_PIXELFORMAT_RGB10A2: return WGPUTextureFormat_RGB10A2Unorm;
  15228. case SG_PIXELFORMAT_RG11B10F: return WGPUTextureFormat_RG11B10Ufloat;
  15229. case SG_PIXELFORMAT_RGB9E5: return WGPUTextureFormat_RGB9E5Ufloat;
  15230. case SG_PIXELFORMAT_RG32UI: return WGPUTextureFormat_RG32Uint;
  15231. case SG_PIXELFORMAT_RG32SI: return WGPUTextureFormat_RG32Sint;
  15232. case SG_PIXELFORMAT_RG32F: return WGPUTextureFormat_RG32Float;
  15233. case SG_PIXELFORMAT_RGBA16UI: return WGPUTextureFormat_RGBA16Uint;
  15234. case SG_PIXELFORMAT_RGBA16SI: return WGPUTextureFormat_RGBA16Sint;
  15235. case SG_PIXELFORMAT_RGBA16F: return WGPUTextureFormat_RGBA16Float;
  15236. case SG_PIXELFORMAT_RGBA32UI: return WGPUTextureFormat_RGBA32Uint;
  15237. case SG_PIXELFORMAT_RGBA32SI: return WGPUTextureFormat_RGBA32Sint;
  15238. case SG_PIXELFORMAT_RGBA32F: return WGPUTextureFormat_RGBA32Float;
  15239. case SG_PIXELFORMAT_DEPTH: return WGPUTextureFormat_Depth32Float;
  15240. case SG_PIXELFORMAT_DEPTH_STENCIL: return WGPUTextureFormat_Depth32FloatStencil8;
  15241. case SG_PIXELFORMAT_BC1_RGBA: return WGPUTextureFormat_BC1RGBAUnorm;
  15242. case SG_PIXELFORMAT_BC2_RGBA: return WGPUTextureFormat_BC2RGBAUnorm;
  15243. case SG_PIXELFORMAT_BC3_RGBA: return WGPUTextureFormat_BC3RGBAUnorm;
  15244. case SG_PIXELFORMAT_BC3_SRGBA: return WGPUTextureFormat_BC3RGBAUnormSrgb;
  15245. case SG_PIXELFORMAT_BC4_R: return WGPUTextureFormat_BC4RUnorm;
  15246. case SG_PIXELFORMAT_BC4_RSN: return WGPUTextureFormat_BC4RSnorm;
  15247. case SG_PIXELFORMAT_BC5_RG: return WGPUTextureFormat_BC5RGUnorm;
  15248. case SG_PIXELFORMAT_BC5_RGSN: return WGPUTextureFormat_BC5RGSnorm;
  15249. case SG_PIXELFORMAT_BC6H_RGBF: return WGPUTextureFormat_BC6HRGBFloat;
  15250. case SG_PIXELFORMAT_BC6H_RGBUF: return WGPUTextureFormat_BC6HRGBUfloat;
  15251. case SG_PIXELFORMAT_BC7_RGBA: return WGPUTextureFormat_BC7RGBAUnorm;
  15252. case SG_PIXELFORMAT_BC7_SRGBA: return WGPUTextureFormat_BC7RGBAUnormSrgb;
  15253. case SG_PIXELFORMAT_ETC2_RGB8: return WGPUTextureFormat_ETC2RGB8Unorm;
  15254. case SG_PIXELFORMAT_ETC2_RGB8A1: return WGPUTextureFormat_ETC2RGB8A1Unorm;
  15255. case SG_PIXELFORMAT_ETC2_RGBA8: return WGPUTextureFormat_ETC2RGBA8Unorm;
  15256. case SG_PIXELFORMAT_ETC2_SRGB8: return WGPUTextureFormat_ETC2RGB8UnormSrgb;
  15257. case SG_PIXELFORMAT_ETC2_SRGB8A8: return WGPUTextureFormat_ETC2RGBA8UnormSrgb;
  15258. case SG_PIXELFORMAT_EAC_R11: return WGPUTextureFormat_EACR11Unorm;
  15259. case SG_PIXELFORMAT_EAC_R11SN: return WGPUTextureFormat_EACR11Snorm;
  15260. case SG_PIXELFORMAT_EAC_RG11: return WGPUTextureFormat_EACRG11Unorm;
  15261. case SG_PIXELFORMAT_EAC_RG11SN: return WGPUTextureFormat_EACRG11Snorm;
  15262. case SG_PIXELFORMAT_ASTC_4x4_RGBA: return WGPUTextureFormat_ASTC4x4Unorm;
  15263. case SG_PIXELFORMAT_ASTC_4x4_SRGBA: return WGPUTextureFormat_ASTC4x4UnormSrgb;
  15264. // NOT SUPPORTED
  15265. case SG_PIXELFORMAT_R16:
  15266. case SG_PIXELFORMAT_R16SN:
  15267. case SG_PIXELFORMAT_RG16:
  15268. case SG_PIXELFORMAT_RG16SN:
  15269. case SG_PIXELFORMAT_RGBA16:
  15270. case SG_PIXELFORMAT_RGBA16SN:
  15271. return WGPUTextureFormat_Undefined;
  15272. default:
  15273. SOKOL_UNREACHABLE;
  15274. return WGPUTextureFormat_Force32;
  15275. }
  15276. }

_SOKOL_PRIVATE WGPUCompareFunction _sg_wgpu_comparefunc(sg_compare_func f) {
    switch (f) {
        case SG_COMPAREFUNC_NEVER: return WGPUCompareFunction_Never;
        case SG_COMPAREFUNC_LESS: return WGPUCompareFunction_Less;
        case SG_COMPAREFUNC_EQUAL: return WGPUCompareFunction_Equal;
        case SG_COMPAREFUNC_LESS_EQUAL: return WGPUCompareFunction_LessEqual;
        case SG_COMPAREFUNC_GREATER: return WGPUCompareFunction_Greater;
        case SG_COMPAREFUNC_NOT_EQUAL: return WGPUCompareFunction_NotEqual;
        case SG_COMPAREFUNC_GREATER_EQUAL: return WGPUCompareFunction_GreaterEqual;
        case SG_COMPAREFUNC_ALWAYS: return WGPUCompareFunction_Always;
        default:
            SOKOL_UNREACHABLE;
            return WGPUCompareFunction_Force32;
    }
}

_SOKOL_PRIVATE WGPUStencilOperation _sg_wgpu_stencilop(sg_stencil_op op) {
    switch (op) {
        case SG_STENCILOP_KEEP: return WGPUStencilOperation_Keep;
        case SG_STENCILOP_ZERO: return WGPUStencilOperation_Zero;
        case SG_STENCILOP_REPLACE: return WGPUStencilOperation_Replace;
        case SG_STENCILOP_INCR_CLAMP: return WGPUStencilOperation_IncrementClamp;
        case SG_STENCILOP_DECR_CLAMP: return WGPUStencilOperation_DecrementClamp;
        case SG_STENCILOP_INVERT: return WGPUStencilOperation_Invert;
        case SG_STENCILOP_INCR_WRAP: return WGPUStencilOperation_IncrementWrap;
        case SG_STENCILOP_DECR_WRAP: return WGPUStencilOperation_DecrementWrap;
        default:
            SOKOL_UNREACHABLE;
            return WGPUStencilOperation_Force32;
    }
}

_SOKOL_PRIVATE WGPUBlendOperation _sg_wgpu_blendop(sg_blend_op op) {
    switch (op) {
        case SG_BLENDOP_ADD: return WGPUBlendOperation_Add;
        case SG_BLENDOP_SUBTRACT: return WGPUBlendOperation_Subtract;
        case SG_BLENDOP_REVERSE_SUBTRACT: return WGPUBlendOperation_ReverseSubtract;
        case SG_BLENDOP_MIN: return WGPUBlendOperation_Min;
        case SG_BLENDOP_MAX: return WGPUBlendOperation_Max;
        default:
            SOKOL_UNREACHABLE;
            return WGPUBlendOperation_Force32;
    }
}

_SOKOL_PRIVATE WGPUBlendFactor _sg_wgpu_blendfactor(sg_blend_factor f) {
    switch (f) {
        case SG_BLENDFACTOR_ZERO: return WGPUBlendFactor_Zero;
        case SG_BLENDFACTOR_ONE: return WGPUBlendFactor_One;
        case SG_BLENDFACTOR_SRC_COLOR: return WGPUBlendFactor_Src;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return WGPUBlendFactor_OneMinusSrc;
        case SG_BLENDFACTOR_SRC_ALPHA: return WGPUBlendFactor_SrcAlpha;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return WGPUBlendFactor_OneMinusSrcAlpha;
        case SG_BLENDFACTOR_DST_COLOR: return WGPUBlendFactor_Dst;
        case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return WGPUBlendFactor_OneMinusDst;
        case SG_BLENDFACTOR_DST_ALPHA: return WGPUBlendFactor_DstAlpha;
        case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return WGPUBlendFactor_OneMinusDstAlpha;
        case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return WGPUBlendFactor_SrcAlphaSaturated;
        case SG_BLENDFACTOR_BLEND_COLOR: return WGPUBlendFactor_Constant;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return WGPUBlendFactor_OneMinusConstant;
        // FIXME: separate blend alpha value not supported?
        case SG_BLENDFACTOR_BLEND_ALPHA: return WGPUBlendFactor_Constant;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return WGPUBlendFactor_OneMinusConstant;
        default:
            SOKOL_UNREACHABLE;
            return WGPUBlendFactor_Force32;
    }
}

_SOKOL_PRIVATE WGPUColorWriteMask _sg_wgpu_colorwritemask(sg_color_mask m) {
    int res = 0;
    if (0 != (m & SG_COLORMASK_R)) {
        res |= (int)WGPUColorWriteMask_Red;
    }
    if (0 != (m & SG_COLORMASK_G)) {
        res |= (int)WGPUColorWriteMask_Green;
    }
    if (0 != (m & SG_COLORMASK_B)) {
        res |= (int)WGPUColorWriteMask_Blue;
    }
    if (0 != (m & SG_COLORMASK_A)) {
        res |= (int)WGPUColorWriteMask_Alpha;
    }
    return (WGPUColorWriteMask)res;
}

_SOKOL_PRIVATE WGPUShaderStage _sg_wgpu_shader_stage(sg_shader_stage stage) {
    switch (stage) {
        case SG_SHADERSTAGE_VERTEX: return WGPUShaderStage_Vertex;
        case SG_SHADERSTAGE_FRAGMENT: return WGPUShaderStage_Fragment;
        case SG_SHADERSTAGE_COMPUTE: return WGPUShaderStage_Compute;
        default: SOKOL_UNREACHABLE; return WGPUShaderStage_None;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_init_caps(void) {
    _sg.backend = SG_BACKEND_WGPU;
    _sg.features.origin_top_left = true;
    _sg.features.image_clamp_to_border = false;
    _sg.features.mrt_independent_blend_state = true;
    _sg.features.mrt_independent_write_mask = true;
    _sg.features.compute = true;
    _sg.features.msaa_texture_bindings = true;
    _sg.features.draw_base_vertex = true;
    _sg.features.draw_base_instance = true;
    wgpuDeviceGetLimits(_sg.wgpu.dev, &_sg.wgpu.limits);
    const WGPULimits* l = &_sg.wgpu.limits;
    _sg.limits.max_image_size_2d = (int) l->maxTextureDimension2D;
    _sg.limits.max_image_size_cube = (int) l->maxTextureDimension2D; // not a bug, see: https://github.com/gpuweb/gpuweb/issues/1327
    _sg.limits.max_image_size_3d = (int) l->maxTextureDimension3D;
    _sg.limits.max_image_size_array = (int) l->maxTextureDimension2D;
    _sg.limits.max_image_array_layers = (int) l->maxTextureArrayLayers;
    _sg.limits.max_vertex_attrs = SG_MAX_VERTEX_ATTRIBUTES;
    _sg.limits.max_color_attachments = _sg_min((int)l->maxColorAttachments, SG_MAX_COLOR_ATTACHMENTS);
    _sg.limits.max_texture_bindings_per_stage = _sg_min((int)l->maxSampledTexturesPerShaderStage, SG_MAX_VIEW_BINDSLOTS);
    _sg.limits.max_storage_buffer_bindings_per_stage = _sg_min((int)l->maxStorageBuffersPerShaderStage, SG_MAX_VIEW_BINDSLOTS);
    _sg.limits.max_storage_image_bindings_per_stage = _sg_min((int)l->maxStorageTexturesPerShaderStage, SG_MAX_VIEW_BINDSLOTS);
    // NOTE: no WGPUTextureFormat_R16Unorm
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_SRGB8A8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_BGRA8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGB10A2]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_R8SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG8SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    // FIXME: could be made renderable via an extension
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG11B10F]);
    // NOTE: for the following formats, WebGPU allows MSAA rendering but not
    // MSAA-resolve, and that combination is currently not supported by sokol-gfx
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R8UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG8UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R16UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R16SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG16UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG16SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    if (wgpuDeviceHasFeature(_sg.wgpu.dev, WGPUFeatureName_Float32Filterable)) {
        _sg_pixelformat_sfr(&_sg.formats[SG_PIXELFORMAT_R32F]);
        _sg_pixelformat_sfr(&_sg.formats[SG_PIXELFORMAT_RG32F]);
        _sg_pixelformat_sfr(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
    } else {
        _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32F]);
        _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG32F]);
        _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
    }
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH]);
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH_STENCIL]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGB9E5]);
    if (wgpuDeviceHasFeature(_sg.wgpu.dev, WGPUFeatureName_TextureCompressionBC)) {
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC1_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC2_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_SRGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_R]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_RSN]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RG]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RGSN]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBF]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBUF]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_SRGBA]);
    }
    if (wgpuDeviceHasFeature(_sg.wgpu.dev, WGPUFeatureName_TextureCompressionETC2)) {
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8A1]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGBA8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8A8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11SN]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11SN]);
    }
    if (wgpuDeviceHasFeature(_sg.wgpu.dev, WGPUFeatureName_TextureCompressionASTC)) {
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_SRGBA]);
    }
    // see: https://github.com/gpuweb/gpuweb/issues/513
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
}
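
// NOTE: the capabilities gathered here can be inspected by applications
// through the regular public query API; a minimal sketch (these are the
// public sokol-gfx query functions, not backend-specific code):
//
//      sg_features feat = sg_query_features();
//      if (feat.compute) { /* compute passes are supported */ }
//      sg_pixelformat_info fmtinfo = sg_query_pixelformat(SG_PIXELFORMAT_RGBA32F);
//      if (fmtinfo.filter) {
//          // linear filtering of this format is supported (on WebGPU this
//          // requires the 'float32-filterable' feature, see above)
//      }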

_SOKOL_PRIVATE void _sg_wgpu_uniform_system_init(const sg_desc* desc) {
    SOKOL_ASSERT(0 == _sg.wgpu.uniform.staging);
    SOKOL_ASSERT(0 == _sg.wgpu.uniform.buf);
    // Add the max uniform-update size (64 KB) to the requested buffer size;
    // this prevents validation errors in the WebGPU implementation when the
    // entire buffer size is used in a frame (64 KB is the max allowed
    // uniform update size on NVIDIA).
    //
    // FIXME: is this still needed?
    _sg.wgpu.uniform.num_bytes = (uint32_t)(desc->uniform_buffer_size + _SG_WGPU_MAX_UNIFORM_UPDATE_SIZE);
    _sg.wgpu.uniform.staging = (uint8_t*)_sg_malloc(_sg.wgpu.uniform.num_bytes);
    _SG_STRUCT(WGPUBufferDescriptor, ub_desc);
    ub_desc.size = _sg.wgpu.uniform.num_bytes;
    ub_desc.usage = WGPUBufferUsage_Uniform|WGPUBufferUsage_CopyDst;
    _sg.wgpu.uniform.buf = wgpuDeviceCreateBuffer(_sg.wgpu.dev, &ub_desc);
    SOKOL_ASSERT(_sg.wgpu.uniform.buf);
}

_SOKOL_PRIVATE void _sg_wgpu_uniform_system_discard(void) {
    if (_sg.wgpu.uniform.buf) {
        wgpuBufferRelease(_sg.wgpu.uniform.buf);
        _sg.wgpu.uniform.buf = 0;
    }
    if (_sg.wgpu.uniform.staging) {
        _sg_free(_sg.wgpu.uniform.staging);
        _sg.wgpu.uniform.staging = 0;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_uniform_system_set_bindgroup(void) {
    SOKOL_ASSERT(_sg.wgpu.uniform.dirty);
    _sg.wgpu.uniform.dirty = false;
    const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    // NOTE: dynamic offsets must be in binding order, not in BindGroupEntry order
    SOKOL_ASSERT(shd->wgpu.ub_num_dynoffsets <= SG_MAX_UNIFORMBLOCK_BINDSLOTS);
    _SG_STRUCT(uint32_t, dyn_offsets[SG_MAX_UNIFORMBLOCK_BINDSLOTS]);
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        if (shd->cmn.uniform_blocks[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        uint8_t dynoffset_index = shd->wgpu.ub_dynoffsets[i];
        SOKOL_ASSERT(dynoffset_index < shd->wgpu.ub_num_dynoffsets);
        dyn_offsets[dynoffset_index] = _sg.wgpu.uniform.bind_offsets[i];
    }
    if (_sg.cur_pass.is_compute) {
        SOKOL_ASSERT(_sg.wgpu.cpass_enc);
        wgpuComputePassEncoderSetBindGroup(_sg.wgpu.cpass_enc,
            _SG_WGPU_UB_BINDGROUP_INDEX,
            shd->wgpu.bg_ub,
            shd->wgpu.ub_num_dynoffsets,
            dyn_offsets);
    } else {
        SOKOL_ASSERT(_sg.wgpu.rpass_enc);
        wgpuRenderPassEncoderSetBindGroup(_sg.wgpu.rpass_enc,
            _SG_WGPU_UB_BINDGROUP_INDEX,
            shd->wgpu.bg_ub,
            shd->wgpu.ub_num_dynoffsets,
            dyn_offsets);
    }
}
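
// NOTE: a worked example for the 'binding order' requirement above: if a
// shader uses sokol-gfx uniform-block bindslots 0 and 1, mapped to the
// (made-up) WGSL bindings @group(0) @binding(4) and @group(0) @binding(2),
// then ub_dynoffsets[] is {1, 0}, and the dynamic-offset array passed to
// setBindGroup must be { offset-of-slot-1, offset-of-slot-0 }, i.e. ordered
// by WGSL binding number (2 before 4), not by sokol bindslot.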

_SOKOL_PRIVATE void _sg_wgpu_uniform_system_on_apply_pipeline(void) {
    _sg.wgpu.uniform.dirty = false;
}

_SOKOL_PRIVATE void _sg_wgpu_uniform_system_on_commit(void) {
    wgpuQueueWriteBuffer(_sg.wgpu.queue, _sg.wgpu.uniform.buf, 0, _sg.wgpu.uniform.staging, _sg.wgpu.uniform.offset);
    _sg_stats_add(wgpu.uniforms.size_write_buffer, _sg.wgpu.uniform.offset);
    _sg.wgpu.uniform.offset = 0;
    _sg_clear(_sg.wgpu.uniform.bind_offsets, sizeof(_sg.wgpu.uniform.bind_offsets));
}
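
// NOTE: uniform data is streamed through a single per-frame staging buffer:
// uniform updates are appended to the CPU-side staging memory (recording a
// byte offset per uniform-block bindslot), and at commit the entire used
// range is copied into the GPU-side uniform buffer with one writeBuffer
// call. The dynamic offsets passed in the setBindGroup call above then
// select each uniform block's window inside that buffer.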

_SOKOL_PRIVATE void _sg_wgpu_bindgroups_pool_init(const sg_desc* desc) {
    SOKOL_ASSERT((desc->wgpu.bindgroups_cache_size > 0) && (desc->wgpu.bindgroups_cache_size < _SG_MAX_POOL_SIZE));
    _sg_wgpu_bindgroups_pool_t* p = &_sg.wgpu.bindgroups_pool;
    SOKOL_ASSERT(0 == p->bindgroups);
    const int pool_size = desc->wgpu.bindgroups_cache_size;
    _sg_pool_init(&p->pool, pool_size);
    size_t pool_byte_size = sizeof(_sg_wgpu_bindgroup_t) * (size_t)p->pool.size;
    p->bindgroups = (_sg_wgpu_bindgroup_t*) _sg_malloc_clear(pool_byte_size);
}

_SOKOL_PRIVATE void _sg_wgpu_bindgroups_pool_discard(void) {
    _sg_wgpu_bindgroups_pool_t* p = &_sg.wgpu.bindgroups_pool;
    SOKOL_ASSERT(p->bindgroups);
    _sg_free(p->bindgroups); p->bindgroups = 0;
    _sg_pool_discard(&p->pool);
}

_SOKOL_PRIVATE _sg_wgpu_bindgroup_t* _sg_wgpu_bindgroup_at(uint32_t bg_id) {
    SOKOL_ASSERT(SG_INVALID_ID != bg_id);
    _sg_wgpu_bindgroups_pool_t* p = &_sg.wgpu.bindgroups_pool;
    int slot_index = _sg_slot_index(bg_id);
    SOKOL_ASSERT((slot_index > _SG_INVALID_SLOT_INDEX) && (slot_index < p->pool.size));
    return &p->bindgroups[slot_index];
}

_SOKOL_PRIVATE _sg_wgpu_bindgroup_t* _sg_wgpu_lookup_bindgroup(uint32_t bg_id) {
    if (SG_INVALID_ID != bg_id) {
        _sg_wgpu_bindgroup_t* bg = _sg_wgpu_bindgroup_at(bg_id);
        if (bg->slot.id == bg_id) {
            return bg;
        }
    }
    return 0;
}

_SOKOL_PRIVATE _sg_wgpu_bindgroup_handle_t _sg_wgpu_alloc_bindgroup(void) {
    _sg_wgpu_bindgroups_pool_t* p = &_sg.wgpu.bindgroups_pool;
    _sg_wgpu_bindgroup_handle_t res;
    int slot_index = _sg_pool_alloc_index(&p->pool);
    if (_SG_INVALID_SLOT_INDEX != slot_index) {
        res.id = _sg_slot_alloc(&p->pool, &p->bindgroups[slot_index].slot, slot_index);
    } else {
        res.id = SG_INVALID_ID;
        _SG_ERROR(WGPU_BINDGROUPS_POOL_EXHAUSTED);
    }
    return res;
}

_SOKOL_PRIVATE void _sg_wgpu_dealloc_bindgroup(_sg_wgpu_bindgroup_t* bg) {
    SOKOL_ASSERT(bg && (bg->slot.state == SG_RESOURCESTATE_ALLOC) && (bg->slot.id != SG_INVALID_ID));
    _sg_wgpu_bindgroups_pool_t* p = &_sg.wgpu.bindgroups_pool;
    _sg_pool_free_index(&p->pool, _sg_slot_index(bg->slot.id));
    _sg_slot_reset(&bg->slot);
}

_SOKOL_PRIVATE void _sg_wgpu_reset_bindgroup_to_alloc_state(_sg_wgpu_bindgroup_t* bg) {
    SOKOL_ASSERT(bg);
    _sg_slot_t slot = bg->slot;
    _sg_clear(bg, sizeof(_sg_wgpu_bindgroup_t));
    bg->slot = slot;
    bg->slot.state = SG_RESOURCESTATE_ALLOC;
}

// MurmurHash64B (see: https://github.com/aappleby/smhasher/blob/61a0530f28277f2e850bfc39600ce61d02b518de/src/MurmurHash2.cpp#L142)
_SOKOL_PRIVATE uint64_t _sg_wgpu_hash(const void* key, int len, uint64_t seed) {
    const uint32_t m = 0x5bd1e995;
    const int r = 24;
    uint32_t h1 = (uint32_t)seed ^ (uint32_t)len;
    uint32_t h2 = (uint32_t)(seed >> 32);
    const uint32_t* data = (const uint32_t*)key;
    while (len >= 8) {
        uint32_t k1 = *data++;
        k1 *= m; k1 ^= k1 >> r; k1 *= m;
        h1 *= m; h1 ^= k1;
        len -= 4;
        uint32_t k2 = *data++;
        k2 *= m; k2 ^= k2 >> r; k2 *= m;
        h2 *= m; h2 ^= k2;
        len -= 4;
    }
    if (len >= 4) {
        uint32_t k1 = *data++;
        k1 *= m; k1 ^= k1 >> r; k1 *= m;
        h1 *= m; h1 ^= k1;
        len -= 4;
    }
    switch (len) {
        case 3: h2 ^= (uint32_t)(((unsigned char*)data)[2] << 16);
        // fall through
        case 2: h2 ^= (uint32_t)(((unsigned char*)data)[1] << 8);
        // fall through
        case 1: h2 ^= ((unsigned char*)data)[0];
            h2 *= m;
    }
    h1 ^= h2 >> 18; h1 *= m;
    h2 ^= h1 >> 22; h2 *= m;
    h1 ^= h2 >> 17; h1 *= m;
    h2 ^= h1 >> 19; h2 *= m;
    uint64_t h = h1;
    h = (h << 32) | h2;
    return h;
}
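
// Layout of the 64-bit bindgroups-cache key items packed by
// _sg_wgpu_bindgroups_cache_item() below:
//
//      bits 56..63: the wgpu @binding slot (0xFF for the pipeline item)
//      bits 54..55: the item type (pipeline, view or sampler)
//      bits 32..53: the resource slot's uninit_count (truncated to 22 bits)
//      bits  0..31: the resource slot's id
//
// The uninit_count is included so that a resource which is uninitialized
// and then re-initialized under the same id cannot alias a stale cache
// entry.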

_SOKOL_PRIVATE uint64_t _sg_wgpu_bindgroups_cache_item(_sg_wgpu_bindgroups_cache_item_type_t type, uint8_t wgpu_binding, uint32_t id, uint32_t uninit_count) {
    const uint64_t bb = wgpu_binding;
    const uint64_t t = type & 3;
    const uint64_t ccccc = uninit_count & ((1 << 22) - 1);
    const uint64_t iiiiiiii = id;
    return (bb << 56) | (t << 54) | (ccccc << 32) | iiiiiiii;
}

_SOKOL_PRIVATE uint64_t _sg_wgpu_bindgroups_cache_pip_item(const _sg_slot_t* slot) {
    return _sg_wgpu_bindgroups_cache_item(_SG_WGPU_BINDGROUPSCACHEITEMTYPE_PIPELINE, 0xFF, slot->id, slot->uninit_count);
}

_SOKOL_PRIVATE uint64_t _sg_wgpu_bindgroups_cache_view_item(uint8_t wgpu_binding, const _sg_slot_t* slot) {
    return _sg_wgpu_bindgroups_cache_item(_SG_WGPU_BINDGROUPSCACHEITEMTYPE_VIEW, wgpu_binding, slot->id, slot->uninit_count);
}

_SOKOL_PRIVATE uint64_t _sg_wgpu_bindgroups_cache_sampler_item(uint8_t wgpu_binding, const _sg_slot_t* slot) {
    return _sg_wgpu_bindgroups_cache_item(_SG_WGPU_BINDGROUPSCACHEITEMTYPE_SAMPLER, wgpu_binding, slot->id, slot->uninit_count);
}

_SOKOL_PRIVATE void _sg_wgpu_init_bindgroups_cache_key(_sg_wgpu_bindgroups_cache_key_t* key, const _sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(bnd);
    SOKOL_ASSERT(bnd->pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd->pip->cmn.shader);
    _sg_clear(key->items, sizeof(key->items));
    key->items[0] = _sg_wgpu_bindgroups_cache_pip_item(&bnd->pip->slot);
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        SOKOL_ASSERT(bnd->views[i]);
        const size_t item_idx = i + 1;
        SOKOL_ASSERT(item_idx < _SG_WGPU_BINDGROUPSCACHEKEY_NUM_ITEMS);
        SOKOL_ASSERT(0 == key->items[item_idx]);
        const uint8_t wgpu_binding = shd->wgpu.view_grp1_bnd_n[i];
        key->items[item_idx] = _sg_wgpu_bindgroups_cache_view_item(wgpu_binding, &bnd->views[i]->slot);
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        if (shd->cmn.samplers[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        SOKOL_ASSERT(bnd->smps[i]);
        const size_t item_idx = i + 1 + SG_MAX_VIEW_BINDSLOTS;
        SOKOL_ASSERT(item_idx < _SG_WGPU_BINDGROUPSCACHEKEY_NUM_ITEMS);
        SOKOL_ASSERT(0 == key->items[item_idx]);
        const uint8_t wgpu_binding = shd->wgpu.smp_grp1_bnd_n[i];
        key->items[item_idx] = _sg_wgpu_bindgroups_cache_sampler_item(wgpu_binding, &bnd->smps[i]->slot);
    }
    key->hash = _sg_wgpu_hash(&key->items, (int)sizeof(key->items), 0x1234567887654321);
}

_SOKOL_PRIVATE bool _sg_wgpu_compare_bindgroups_cache_key(_sg_wgpu_bindgroups_cache_key_t* k0, _sg_wgpu_bindgroups_cache_key_t* k1) {
    SOKOL_ASSERT(k0 && k1);
    if (k0->hash != k1->hash) {
        return false;
    }
    if (memcmp(&k0->items, &k1->items, sizeof(k0->items)) != 0) {
        _sg_stats_inc(wgpu.bindings.num_bindgroup_cache_hash_vs_key_mismatch);
        return false;
    }
    return true;
}
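
// NOTE: the bindgroups-cache key comparison above is two-level: the 64-bit
// murmur hashes are compared first (cheap, and also rejects two different
// hashes that happened to map to the same cache slot), and only then is the
// full key-item array memcmp'd to detect genuine hash collisions (those are
// counted in the num_bindgroup_cache_hash_vs_key_mismatch stat).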

_SOKOL_PRIVATE _sg_wgpu_bindgroup_t* _sg_wgpu_create_bindgroup(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(_sg.wgpu.dev);
    SOKOL_ASSERT(bnd->pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd->pip->cmn.shader);
    _sg_stats_inc(wgpu.bindings.num_create_bindgroup);
    _sg_wgpu_bindgroup_handle_t bg_id = _sg_wgpu_alloc_bindgroup();
    if (bg_id.id == SG_INVALID_ID) {
        return 0;
    }
    _sg_wgpu_bindgroup_t* bg = _sg_wgpu_bindgroup_at(bg_id.id);
    SOKOL_ASSERT(bg && (bg->slot.state == SG_RESOURCESTATE_ALLOC));
    // create wgpu bindgroup object (also see _sg_wgpu_create_shader())
    WGPUBindGroupLayout bgl = shd->wgpu.bgl_view_smp;
    SOKOL_ASSERT(bgl);
    _SG_STRUCT(WGPUBindGroupEntry, bg_entries[_SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES]);
    size_t bgl_index = 0;
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const _sg_view_t* view = bnd->views[i];
        SOKOL_ASSERT(view);
        SOKOL_ASSERT(bgl_index < _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES);
        WGPUBindGroupEntry* bg_entry = &bg_entries[bgl_index];
        bg_entry->binding = shd->wgpu.view_grp1_bnd_n[i];
        if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
            const _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
            SOKOL_ASSERT(buf->wgpu.buf);
            SOKOL_ASSERT(view->cmn.buf.offset < buf->cmn.size);
            bg_entry->buffer = buf->wgpu.buf;
            bg_entry->offset = (uint64_t)view->cmn.buf.offset;
            bg_entry->size = (uint64_t)(buf->cmn.size - view->cmn.buf.offset);
        } else {
            SOKOL_ASSERT(view->wgpu.view);
            bg_entry->textureView = view->wgpu.view;
        }
        bgl_index += 1;
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        if (shd->cmn.samplers[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        SOKOL_ASSERT(bnd->smps[i]);
        SOKOL_ASSERT(bgl_index < _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES);
        WGPUBindGroupEntry* bg_entry = &bg_entries[bgl_index];
        bg_entry->binding = shd->wgpu.smp_grp1_bnd_n[i];
        bg_entry->sampler = bnd->smps[i]->wgpu.smp;
        bgl_index += 1;
    }
    _SG_STRUCT(WGPUBindGroupDescriptor, bg_desc);
    bg_desc.layout = bgl;
    bg_desc.entryCount = bgl_index;
    bg_desc.entries = bg_entries;
    bg->bindgroup = wgpuDeviceCreateBindGroup(_sg.wgpu.dev, &bg_desc);
    if (bg->bindgroup == 0) {
        _SG_ERROR(WGPU_CREATEBINDGROUP_FAILED);
        bg->slot.state = SG_RESOURCESTATE_FAILED;
        return bg;
    }
    _sg_wgpu_init_bindgroups_cache_key(&bg->key, bnd);
    bg->slot.state = SG_RESOURCESTATE_VALID;
    return bg;
}

_SOKOL_PRIVATE void _sg_wgpu_discard_bindgroup(_sg_wgpu_bindgroup_t* bg) {
    SOKOL_ASSERT(bg);
    _sg_stats_inc(wgpu.bindings.num_discard_bindgroup);
    if (bg->slot.state == SG_RESOURCESTATE_VALID) {
        if (bg->bindgroup) {
            wgpuBindGroupRelease(bg->bindgroup);
            bg->bindgroup = 0;
        }
        _sg_wgpu_reset_bindgroup_to_alloc_state(bg);
        SOKOL_ASSERT(bg->slot.state == SG_RESOURCESTATE_ALLOC);
    }
    if (bg->slot.state == SG_RESOURCESTATE_ALLOC) {
        _sg_wgpu_dealloc_bindgroup(bg);
        SOKOL_ASSERT(bg->slot.state == SG_RESOURCESTATE_INITIAL);
    }
}

_SOKOL_PRIVATE void _sg_wgpu_discard_all_bindgroups(void) {
    _sg_wgpu_bindgroups_pool_t* p = &_sg.wgpu.bindgroups_pool;
    for (int i = 0; i < p->pool.size; i++) {
        sg_resource_state state = p->bindgroups[i].slot.state;
        if ((state == SG_RESOURCESTATE_VALID) || (state == SG_RESOURCESTATE_FAILED)) {
            _sg_wgpu_discard_bindgroup(&p->bindgroups[i]);
        }
    }
}

_SOKOL_PRIVATE void _sg_wgpu_bindgroups_cache_init(const sg_desc* desc) {
    SOKOL_ASSERT(desc);
    SOKOL_ASSERT(_sg.wgpu.bindgroups_cache.num == 0);
    SOKOL_ASSERT(_sg.wgpu.bindgroups_cache.index_mask == 0);
    SOKOL_ASSERT(_sg.wgpu.bindgroups_cache.items == 0);
    const int num = desc->wgpu.bindgroups_cache_size;
    if (num <= 1) {
        _SG_PANIC(WGPU_BINDGROUPSCACHE_SIZE_GREATER_ONE);
    }
    if (!_sg_ispow2(num)) {
        _SG_PANIC(WGPU_BINDGROUPSCACHE_SIZE_POW2);
    }
    _sg.wgpu.bindgroups_cache.num = (uint32_t)desc->wgpu.bindgroups_cache_size;
    _sg.wgpu.bindgroups_cache.index_mask = _sg.wgpu.bindgroups_cache.num - 1;
    size_t size_in_bytes = sizeof(_sg_wgpu_bindgroup_handle_t) * (size_t)num;
    _sg.wgpu.bindgroups_cache.items = (_sg_wgpu_bindgroup_handle_t*)_sg_malloc_clear(size_in_bytes);
}

_SOKOL_PRIVATE void _sg_wgpu_bindgroups_cache_discard(void) {
    if (_sg.wgpu.bindgroups_cache.items) {
        _sg_free(_sg.wgpu.bindgroups_cache.items);
        _sg.wgpu.bindgroups_cache.items = 0;
    }
    _sg.wgpu.bindgroups_cache.num = 0;
    _sg.wgpu.bindgroups_cache.index_mask = 0;
}
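
// NOTE: the bindgroups cache is a simple direct-mapped hash table: the low
// bits of a key's hash select the cache slot (which is why the cache size
// must be a power of two), and storing a new entry simply overwrites
// whatever id lived in that slot before. There is no collision chaining,
// which keeps lookups O(1) at the cost of occasionally re-creating a
// bindgroup when two keys compete for the same slot.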

_SOKOL_PRIVATE void _sg_wgpu_bindgroups_cache_set(uint64_t hash, uint32_t bg_id) {
    uint32_t index = hash & _sg.wgpu.bindgroups_cache.index_mask;
    SOKOL_ASSERT(index < _sg.wgpu.bindgroups_cache.num);
    SOKOL_ASSERT(_sg.wgpu.bindgroups_cache.items);
    _sg.wgpu.bindgroups_cache.items[index].id = bg_id;
}

_SOKOL_PRIVATE uint32_t _sg_wgpu_bindgroups_cache_get(uint64_t hash) {
    uint32_t index = hash & _sg.wgpu.bindgroups_cache.index_mask;
    SOKOL_ASSERT(index < _sg.wgpu.bindgroups_cache.num);
    SOKOL_ASSERT(_sg.wgpu.bindgroups_cache.items);
    return _sg.wgpu.bindgroups_cache.items[index].id;
}

// called from wgpu resource destroy functions to also invalidate any
// bindgroups cache slot and bindgroup referencing that resource
_SOKOL_PRIVATE void _sg_wgpu_bindgroups_cache_invalidate(_sg_wgpu_bindgroups_cache_item_type_t type, const _sg_slot_t* slot) {
    // NOTE: the match mask must leave the wgpu-binding byte zeroed, since
    // the destroyed resource must be found no matter which @binding number
    // it was bound under
    const uint64_t key_mask = _sg_wgpu_bindgroups_cache_item(type, 0, 0xFFFFFFFF, 0xFFFFFFFF);
    const uint64_t key_item = _sg_wgpu_bindgroups_cache_item(type, 0, slot->id, slot->uninit_count) & key_mask;
    SOKOL_ASSERT(_sg.wgpu.bindgroups_cache.items);
    for (uint32_t cache_item_idx = 0; cache_item_idx < _sg.wgpu.bindgroups_cache.num; cache_item_idx++) {
        const uint32_t bg_id = _sg.wgpu.bindgroups_cache.items[cache_item_idx].id;
        if (bg_id != SG_INVALID_ID) {
            _sg_wgpu_bindgroup_t* bg = _sg_wgpu_lookup_bindgroup(bg_id);
            SOKOL_ASSERT(bg && (bg->slot.state == SG_RESOURCESTATE_VALID));
            // check if resource is in bindgroup, if yes discard bindgroup and invalidate cache slot
            bool invalidate_cache_item = false;
            for (int key_item_idx = 0; key_item_idx < _SG_WGPU_BINDGROUPSCACHEKEY_NUM_ITEMS; key_item_idx++) {
                if ((bg->key.items[key_item_idx] & key_mask) == key_item) {
                    invalidate_cache_item = true;
                    break;
                }
            }
            if (invalidate_cache_item) {
                _sg_wgpu_discard_bindgroup(bg); bg = 0;
                // NOTE: passing the cache index as 'hash' is correct here,
                // since cache slots are selected via 'hash & index_mask'
                // and (index & index_mask) == index
                _sg_wgpu_bindgroups_cache_set(cache_item_idx, SG_INVALID_ID);
                _sg_stats_inc(wgpu.bindings.num_bindgroup_cache_invalidates);
            }
        }
    }
}

_SOKOL_PRIVATE void _sg_wgpu_bindings_cache_clear(void) {
    memset(&_sg.wgpu.bindings_cache, 0, sizeof(_sg.wgpu.bindings_cache));
}

_SOKOL_PRIVATE bool _sg_wgpu_bindings_cache_vb_dirty(size_t index, const _sg_buffer_t* vb, uint64_t offset) {
    SOKOL_ASSERT(index < SG_MAX_VERTEXBUFFER_BINDSLOTS);
    if (vb) {
        return (_sg.wgpu.bindings_cache.vbs[index].buffer.id != vb->slot.id)
            || (_sg.wgpu.bindings_cache.vbs[index].offset != offset);
    } else {
        return _sg.wgpu.bindings_cache.vbs[index].buffer.id != SG_INVALID_ID;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_bindings_cache_vb_update(size_t index, const _sg_buffer_t* vb, uint64_t offset) {
    SOKOL_ASSERT(index < SG_MAX_VERTEXBUFFER_BINDSLOTS);
    if (vb) {
        _sg.wgpu.bindings_cache.vbs[index].buffer.id = vb->slot.id;
        _sg.wgpu.bindings_cache.vbs[index].offset = offset;
    } else {
        _sg.wgpu.bindings_cache.vbs[index].buffer.id = SG_INVALID_ID;
        _sg.wgpu.bindings_cache.vbs[index].offset = 0;
    }
}

_SOKOL_PRIVATE bool _sg_wgpu_bindings_cache_ib_dirty(const _sg_buffer_t* ib, uint64_t offset) {
    if (ib) {
        return (_sg.wgpu.bindings_cache.ib.buffer.id != ib->slot.id)
            || (_sg.wgpu.bindings_cache.ib.offset != offset);
    } else {
        return _sg.wgpu.bindings_cache.ib.buffer.id != SG_INVALID_ID;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_bindings_cache_ib_update(const _sg_buffer_t* ib, uint64_t offset) {
    if (ib) {
        _sg.wgpu.bindings_cache.ib.buffer.id = ib->slot.id;
        _sg.wgpu.bindings_cache.ib.offset = offset;
    } else {
        _sg.wgpu.bindings_cache.ib.buffer.id = SG_INVALID_ID;
        _sg.wgpu.bindings_cache.ib.offset = 0;
    }
}

_SOKOL_PRIVATE bool _sg_wgpu_bindings_cache_bg_dirty(const _sg_wgpu_bindgroup_t* bg) {
    if (bg) {
        return _sg.wgpu.bindings_cache.bg.id != bg->slot.id;
    } else {
        return _sg.wgpu.bindings_cache.bg.id != SG_INVALID_ID;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_bindings_cache_bg_update(const _sg_wgpu_bindgroup_t* bg) {
    if (bg) {
        _sg.wgpu.bindings_cache.bg.id = bg->slot.id;
    } else {
        _sg.wgpu.bindings_cache.bg.id = SG_INVALID_ID;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_set_bindgroup(uint32_t bg_idx, _sg_wgpu_bindgroup_t* bg) {
    if (_sg_wgpu_bindings_cache_bg_dirty(bg)) {
        _sg_wgpu_bindings_cache_bg_update(bg);
        _sg_stats_inc(wgpu.bindings.num_set_bindgroup);
        if (_sg.cur_pass.is_compute) {
            SOKOL_ASSERT(_sg.wgpu.cpass_enc);
            if (bg) {
                SOKOL_ASSERT(bg->slot.state == SG_RESOURCESTATE_VALID);
                SOKOL_ASSERT(bg->bindgroup);
                wgpuComputePassEncoderSetBindGroup(_sg.wgpu.cpass_enc, bg_idx, bg->bindgroup, 0, 0);
            } else {
                wgpuComputePassEncoderSetBindGroup(_sg.wgpu.cpass_enc, bg_idx, 0, 0, 0);
            }
        } else {
            SOKOL_ASSERT(_sg.wgpu.rpass_enc);
            if (bg) {
                SOKOL_ASSERT(bg->slot.state == SG_RESOURCESTATE_VALID);
                SOKOL_ASSERT(bg->bindgroup);
                wgpuRenderPassEncoderSetBindGroup(_sg.wgpu.rpass_enc, bg_idx, bg->bindgroup, 0, 0);
            } else {
                wgpuRenderPassEncoderSetBindGroup(_sg.wgpu.rpass_enc, bg_idx, 0, 0, 0);
            }
        }
    } else {
        _sg_stats_inc(wgpu.bindings.num_skip_redundant_bindgroup);
    }
}

_SOKOL_PRIVATE bool _sg_wgpu_apply_bindings_bindgroup(_sg_bindings_ptrs_t* bnd) {
    if (!_sg.desc.wgpu.disable_bindgroups_cache) {
        _sg_wgpu_bindgroup_t* bg = 0;
        _sg_wgpu_bindgroups_cache_key_t key;
        _sg_wgpu_init_bindgroups_cache_key(&key, bnd);
        uint32_t bg_id = _sg_wgpu_bindgroups_cache_get(key.hash);
        if (bg_id != SG_INVALID_ID) {
            // potential cache hit
            bg = _sg_wgpu_lookup_bindgroup(bg_id);
            SOKOL_ASSERT(bg && (bg->slot.state == SG_RESOURCESTATE_VALID));
            if (!_sg_wgpu_compare_bindgroups_cache_key(&key, &bg->key)) {
                // cache collision, need to delete cached bindgroup
                _sg_stats_inc(wgpu.bindings.num_bindgroup_cache_collisions);
                _sg_wgpu_discard_bindgroup(bg);
                _sg_wgpu_bindgroups_cache_set(key.hash, SG_INVALID_ID);
                bg = 0;
            } else {
                _sg_stats_inc(wgpu.bindings.num_bindgroup_cache_hits);
            }
        } else {
            _sg_stats_inc(wgpu.bindings.num_bindgroup_cache_misses);
        }
        if (bg == 0) {
            // either no cache entry yet, or a cache collision: create a new bindgroup and store it in the cache
            bg = _sg_wgpu_create_bindgroup(bnd);
            if (bg == 0) {
                // bindgroups pool exhausted
                return false;
            }
            _sg_wgpu_bindgroups_cache_set(key.hash, bg->slot.id);
        }
        if (bg && bg->slot.state == SG_RESOURCESTATE_VALID) {
            _sg_wgpu_set_bindgroup(_SG_WGPU_VIEW_SMP_BINDGROUP_INDEX, bg);
        } else {
            return false;
        }
    } else {
        // bindgroups cache disabled, create and destroy bindgroup on the fly (expensive!)
        _sg_wgpu_bindgroup_t* bg = _sg_wgpu_create_bindgroup(bnd);
        if (bg) {
            if (bg->slot.state == SG_RESOURCESTATE_VALID) {
                _sg_wgpu_set_bindgroup(_SG_WGPU_VIEW_SMP_BINDGROUP_INDEX, bg);
            }
            _sg_wgpu_discard_bindgroup(bg);
        } else {
            return false;
        }
    }
    return true;
}
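
// NOTE: the overall flow above is: build a cache key from the current
// pipeline and all bound views/samplers, look it up in the direct-mapped
// bindgroups cache, verify the full key on a potential hit, and only
// create a new WGPUBindGroup on a miss or collision. When the cache is
// disabled via sg_desc.wgpu.disable_bindgroups_cache, a throwaway
// bindgroup is created and destroyed on every sg_apply_bindings() call
// instead.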

_SOKOL_PRIVATE bool _sg_wgpu_apply_index_buffer(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(_sg.wgpu.rpass_enc);
    const _sg_buffer_t* ib = bnd->ib;
    uint64_t offset = (uint64_t)bnd->ib_offset;
    if (_sg_wgpu_bindings_cache_ib_dirty(ib, offset)) {
        _sg_wgpu_bindings_cache_ib_update(ib, offset);
        if (ib) {
            const WGPUIndexFormat format = _sg_wgpu_indexformat(bnd->pip->cmn.index_type);
            const uint64_t buf_size = (uint64_t)ib->cmn.size;
            SOKOL_ASSERT(buf_size > offset);
            const uint64_t max_bytes = buf_size - offset;
            wgpuRenderPassEncoderSetIndexBuffer(_sg.wgpu.rpass_enc, ib->wgpu.buf, format, offset, max_bytes);
        }
        /*
        NOTE: according to the WebGPU spec, setIndexBuffer doesn't accept a
        null buffer, so there is no 'unbind' call in an else-branch here:
        } else {
            wgpuRenderPassEncoderSetIndexBuffer(_sg.wgpu.rpass_enc, 0, WGPUIndexFormat_Undefined, 0, 0);
        }
        */
        _sg_stats_inc(wgpu.bindings.num_set_index_buffer);
    } else {
        _sg_stats_inc(wgpu.bindings.num_skip_redundant_index_buffer);
    }
    return true;
}

_SOKOL_PRIVATE bool _sg_wgpu_apply_vertex_buffers(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(_sg.wgpu.rpass_enc);
    for (uint32_t slot = 0; slot < SG_MAX_VERTEXBUFFER_BINDSLOTS; slot++) {
        const _sg_buffer_t* vb = bnd->vbs[slot];
        const uint64_t offset = (uint64_t)bnd->vb_offsets[slot];
        if (_sg_wgpu_bindings_cache_vb_dirty(slot, vb, offset)) {
            _sg_wgpu_bindings_cache_vb_update(slot, vb, offset);
            if (vb) {
                const uint64_t buf_size = (uint64_t)vb->cmn.size;
                SOKOL_ASSERT(buf_size > offset);
                const uint64_t max_bytes = buf_size - offset;
                wgpuRenderPassEncoderSetVertexBuffer(_sg.wgpu.rpass_enc, slot, vb->wgpu.buf, offset, max_bytes);
            } else {
                wgpuRenderPassEncoderSetVertexBuffer(_sg.wgpu.rpass_enc, slot, 0, 0, 0);
            }
            _sg_stats_inc(wgpu.bindings.num_set_vertex_buffer);
        } else {
            _sg_stats_inc(wgpu.bindings.num_skip_redundant_vertex_buffer);
        }
    }
    return true;
}

_SOKOL_PRIVATE void _sg_wgpu_setup_backend(const sg_desc* desc) {
    SOKOL_ASSERT(desc);
    SOKOL_ASSERT(desc->environment.wgpu.device);
    SOKOL_ASSERT(desc->uniform_buffer_size > 0);
    _sg.wgpu.valid = true;
    _sg.wgpu.dev = (WGPUDevice) desc->environment.wgpu.device;
    _sg.wgpu.queue = wgpuDeviceGetQueue(_sg.wgpu.dev);
    SOKOL_ASSERT(_sg.wgpu.queue);
    _sg_wgpu_init_caps();
    _sg_wgpu_uniform_system_init(desc);
    _sg_wgpu_bindgroups_pool_init(desc);
    _sg_wgpu_bindgroups_cache_init(desc);
    _sg_wgpu_bindings_cache_clear();
}

_SOKOL_PRIVATE void _sg_wgpu_discard_backend(void) {
    SOKOL_ASSERT(_sg.wgpu.valid);
    _sg.wgpu.valid = false;
    _sg_wgpu_discard_all_bindgroups();
    _sg_wgpu_bindgroups_cache_discard();
    _sg_wgpu_bindgroups_pool_discard();
    _sg_wgpu_uniform_system_discard();
    // the command encoder is usually released in sg_commit()
    if (_sg.wgpu.cmd_enc) {
        wgpuCommandEncoderRelease(_sg.wgpu.cmd_enc); _sg.wgpu.cmd_enc = 0;
    }
    wgpuQueueRelease(_sg.wgpu.queue); _sg.wgpu.queue = 0;
}

_SOKOL_PRIVATE void _sg_wgpu_reset_state_cache(void) {
    _sg_wgpu_bindings_cache_clear();
}

_SOKOL_PRIVATE sg_resource_state _sg_wgpu_create_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(buf && desc);
    SOKOL_ASSERT(buf->cmn.size > 0);
    const bool injected = (0 != desc->wgpu_buffer);
    if (injected) {
        buf->wgpu.buf = (WGPUBuffer) desc->wgpu_buffer;
        wgpuBufferAddRef(buf->wgpu.buf);
    } else {
        // the buffer mapping size must be a multiple of 4, so round up the
        // buffer size (this is only a problem for index buffers containing
        // an odd number of 16-bit indices)
        const uint64_t wgpu_buf_size = _sg_roundup_u64((uint64_t)buf->cmn.size, 4);
        const bool map_at_creation = buf->cmn.usage.immutable && (desc->data.ptr);
        _SG_STRUCT(WGPUBufferDescriptor, wgpu_buf_desc);
        wgpu_buf_desc.usage = _sg_wgpu_buffer_usage(&buf->cmn.usage);
        wgpu_buf_desc.size = wgpu_buf_size;
        wgpu_buf_desc.mappedAtCreation = map_at_creation;
        wgpu_buf_desc.label = _sg_wgpu_stringview(desc->label);
        buf->wgpu.buf = wgpuDeviceCreateBuffer(_sg.wgpu.dev, &wgpu_buf_desc);
        if (0 == buf->wgpu.buf) {
            _SG_ERROR(WGPU_CREATE_BUFFER_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        if (map_at_creation) {
            SOKOL_ASSERT(desc->data.ptr && (desc->data.size > 0));
            SOKOL_ASSERT(desc->data.size <= (size_t)buf->cmn.size);
            // FIXME: inefficient on WASM
            void* ptr = wgpuBufferGetMappedRange(buf->wgpu.buf, 0, wgpu_buf_size);
            SOKOL_ASSERT(ptr);
            memcpy(ptr, desc->data.ptr, desc->data.size);
            wgpuBufferUnmap(buf->wgpu.buf);
        }
    }
    return SG_RESOURCESTATE_VALID;
}
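
// NOTE: for immutable buffers with initial data, mappedAtCreation (see above)
// allows filling the buffer with a plain memcpy before its first GPU use,
// without a separate staging copy through the queue. The FIXME above
// presumably refers to the WASM/Emscripten situation, where the mapped
// range is shadowed on the JS side and the data has to cross the WASM heap
// boundary one extra time.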

_SOKOL_PRIVATE void _sg_wgpu_discard_buffer(_sg_buffer_t* buf) {
    SOKOL_ASSERT(buf);
    if (buf->wgpu.buf) {
        wgpuBufferRelease(buf->wgpu.buf);
    }
}

_SOKOL_PRIVATE void _sg_wgpu_copy_buffer_data(const _sg_buffer_t* buf, uint64_t offset, const sg_range* data) {
    SOKOL_ASSERT((offset + data->size) <= (size_t)buf->cmn.size);
    // WebGPU's writeBuffer requires the size to be a multiple of four, so we may need to split the copy
    // operation into two writeBuffer calls
    uint64_t clamped_size = data->size & ~3UL;
    uint64_t extra_size = data->size & 3UL;
    SOKOL_ASSERT(extra_size < 4);
    wgpuQueueWriteBuffer(_sg.wgpu.queue, buf->wgpu.buf, offset, data->ptr, clamped_size);
    if (extra_size > 0) {
        const uint64_t extra_src_offset = clamped_size;
        const uint64_t extra_dst_offset = offset + clamped_size;
        uint8_t extra_data[4] = { 0 };
        const uint8_t* extra_src_ptr = ((const uint8_t*)data->ptr) + extra_src_offset;
        for (size_t i = 0; i < extra_size; i++) {
            extra_data[i] = extra_src_ptr[i];
        }
        wgpuQueueWriteBuffer(_sg.wgpu.queue, buf->wgpu.buf, extra_dst_offset, extra_data, 4);
    }
}
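
// NOTE: a worked example for the split write above: updating 10 bytes first
// copies bytes 0..7 with one writeBuffer call, then stages the remaining
// 2 bytes into a zero-padded 4-byte scratch block and writes that with a
// second call (covering bytes 8..11 of the destination range). The padding
// cannot write out of bounds because buffer sizes are rounded up to a
// multiple of 4 in _sg_wgpu_create_buffer().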

_SOKOL_PRIVATE void _sg_wgpu_copy_image_data(const _sg_image_t* img, const sg_image_data* data) {
    _SG_STRUCT(WGPUTexelCopyBufferLayout, wgpu_layout);
    _SG_STRUCT(WGPUTexelCopyTextureInfo, wgpu_copy_tex);
    wgpu_copy_tex.texture = img->wgpu.tex;
    wgpu_copy_tex.aspect = WGPUTextureAspect_All;
    _SG_STRUCT(WGPUExtent3D, wgpu_extent);
    for (int mip_index = 0; mip_index < img->cmn.num_mipmaps; mip_index++) {
        wgpu_copy_tex.mipLevel = (uint32_t)mip_index;
        int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
        int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
        int mip_slices = (img->cmn.type == SG_IMAGETYPE_3D) ? _sg_miplevel_dim(img->cmn.num_slices, mip_index) : img->cmn.num_slices;
        const int row_pitch = _sg_row_pitch(img->cmn.pixel_format, mip_width, 1);
        const int num_rows = _sg_num_rows(img->cmn.pixel_format, mip_height);
        if (_sg_is_compressed_pixel_format(img->cmn.pixel_format)) {
            mip_width = _sg_roundup(mip_width, 4);
            mip_height = _sg_roundup(mip_height, 4);
        }
        wgpu_layout.bytesPerRow = (uint32_t)row_pitch;
        wgpu_layout.rowsPerImage = (uint32_t)num_rows;
        wgpu_extent.width = (uint32_t)mip_width;
        wgpu_extent.height = (uint32_t)mip_height;
        wgpu_extent.depthOrArrayLayers = (uint32_t)mip_slices;
        const sg_range* mip_data = &data->mip_levels[mip_index];
        wgpuQueueWriteTexture(_sg.wgpu.queue, &wgpu_copy_tex, mip_data->ptr, mip_data->size, &wgpu_layout, &wgpu_extent);
    }
}
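
// NOTE: the 4-pixel rounding for compressed formats above exists because
// WebGPU requires texture copy regions to be aligned to the pixel format's
// block size (4x4 for all block-compressed formats supported by sokol-gfx),
// while the logical dimensions of the smallest mip levels can be 1 or 2
// pixels.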

_SOKOL_PRIVATE sg_resource_state _sg_wgpu_create_image(_sg_image_t* img, const sg_image_desc* desc) {
    SOKOL_ASSERT(img && desc);
    const bool injected = (0 != desc->wgpu_texture);
    if (injected) {
        img->wgpu.tex = (WGPUTexture)desc->wgpu_texture;
        wgpuTextureAddRef(img->wgpu.tex);
    } else {
        _SG_STRUCT(WGPUTextureDescriptor, wgpu_tex_desc);
        wgpu_tex_desc.label = _sg_wgpu_stringview(desc->label);
        wgpu_tex_desc.usage = WGPUTextureUsage_TextureBinding|WGPUTextureUsage_CopyDst;
        if (desc->usage.color_attachment || desc->usage.resolve_attachment || desc->usage.depth_stencil_attachment) {
            wgpu_tex_desc.usage |= WGPUTextureUsage_RenderAttachment;
        }
        if (desc->usage.storage_image) {
            wgpu_tex_desc.usage |= WGPUTextureUsage_StorageBinding;
        }
        wgpu_tex_desc.dimension = _sg_wgpu_texture_dimension(img->cmn.type);
        wgpu_tex_desc.size.width = (uint32_t) img->cmn.width;
        wgpu_tex_desc.size.height = (uint32_t) img->cmn.height;
        wgpu_tex_desc.size.depthOrArrayLayers = (uint32_t) img->cmn.num_slices;
        wgpu_tex_desc.format = _sg_wgpu_textureformat(img->cmn.pixel_format);
        wgpu_tex_desc.mipLevelCount = (uint32_t) img->cmn.num_mipmaps;
        wgpu_tex_desc.sampleCount = (uint32_t) img->cmn.sample_count;
        img->wgpu.tex = wgpuDeviceCreateTexture(_sg.wgpu.dev, &wgpu_tex_desc);
        if (0 == img->wgpu.tex) {
            _SG_ERROR(WGPU_CREATE_TEXTURE_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        if (desc->data.mip_levels[0].ptr) {
            _sg_wgpu_copy_image_data(img, &desc->data);
        }
    }
    return SG_RESOURCESTATE_VALID;
}

_SOKOL_PRIVATE void _sg_wgpu_discard_image(_sg_image_t* img) {
    SOKOL_ASSERT(img);
    if (img->wgpu.tex) {
        wgpuTextureRelease(img->wgpu.tex);
        img->wgpu.tex = 0;
    }
}

_SOKOL_PRIVATE sg_resource_state _sg_wgpu_create_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
    SOKOL_ASSERT(smp && desc);
    SOKOL_ASSERT(_sg.wgpu.dev);
    const bool injected = (0 != desc->wgpu_sampler);
    if (injected) {
        smp->wgpu.smp = (WGPUSampler) desc->wgpu_sampler;
        wgpuSamplerAddRef(smp->wgpu.smp);
    } else {
        _SG_STRUCT(WGPUSamplerDescriptor, wgpu_desc);
        wgpu_desc.label = _sg_wgpu_stringview(desc->label);
        wgpu_desc.addressModeU = _sg_wgpu_sampler_address_mode(desc->wrap_u);
        wgpu_desc.addressModeV = _sg_wgpu_sampler_address_mode(desc->wrap_v);
        wgpu_desc.addressModeW = _sg_wgpu_sampler_address_mode(desc->wrap_w);
        wgpu_desc.magFilter = _sg_wgpu_sampler_minmag_filter(desc->mag_filter);
        wgpu_desc.minFilter = _sg_wgpu_sampler_minmag_filter(desc->min_filter);
        wgpu_desc.mipmapFilter = _sg_wgpu_sampler_mipmap_filter(desc->mipmap_filter);
        wgpu_desc.lodMinClamp = desc->min_lod;
        wgpu_desc.lodMaxClamp = desc->max_lod;
        wgpu_desc.compare = _sg_wgpu_comparefunc(desc->compare);
        if (wgpu_desc.compare == WGPUCompareFunction_Never) {
            // NOTE: in sokol-gfx, SG_COMPAREFUNC_NEVER on a sampler means
            // 'not a comparison sampler', which WebGPU expresses as 'Undefined'
            wgpu_desc.compare = WGPUCompareFunction_Undefined;
        }
        wgpu_desc.maxAnisotropy = (uint16_t)desc->max_anisotropy;
        smp->wgpu.smp = wgpuDeviceCreateSampler(_sg.wgpu.dev, &wgpu_desc);
        if (0 == smp->wgpu.smp) {
            _SG_ERROR(WGPU_CREATE_SAMPLER_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
    }
    return SG_RESOURCESTATE_VALID;
}

_SOKOL_PRIVATE void _sg_wgpu_discard_sampler(_sg_sampler_t* smp) {
    SOKOL_ASSERT(smp);
    _sg_wgpu_bindgroups_cache_invalidate(_SG_WGPU_BINDGROUPSCACHEITEMTYPE_SAMPLER, &smp->slot);
    if (smp->wgpu.smp) {
        wgpuSamplerRelease(smp->wgpu.smp);
        smp->wgpu.smp = 0;
    }
}

_SOKOL_PRIVATE _sg_wgpu_shader_func_t _sg_wgpu_create_shader_func(const sg_shader_function* func, const char* label) {
    SOKOL_ASSERT(func);
    SOKOL_ASSERT(func->source);
    SOKOL_ASSERT(func->entry);
    _SG_STRUCT(_sg_wgpu_shader_func_t, res);
    _sg_strcpy(&res.entry, func->entry);
    _SG_STRUCT(WGPUShaderSourceWGSL, wgpu_shdsrc_wgsl);
    wgpu_shdsrc_wgsl.chain.sType = WGPUSType_ShaderSourceWGSL;
    wgpu_shdsrc_wgsl.code = _sg_wgpu_stringview(func->source);
    _SG_STRUCT(WGPUShaderModuleDescriptor, wgpu_shdmod_desc);
    wgpu_shdmod_desc.nextInChain = &wgpu_shdsrc_wgsl.chain;
    wgpu_shdmod_desc.label = _sg_wgpu_stringview(label);
    // NOTE: if compilation fails we won't actually find out in this call since
    // it always returns a valid module handle, and the GetCompilationInfo() call
    // is asynchronous
    res.module = wgpuDeviceCreateShaderModule(_sg.wgpu.dev, &wgpu_shdmod_desc);
    if (0 == res.module) {
        _SG_ERROR(WGPU_CREATE_SHADER_MODULE_FAILED);
    }
    return res;
}

_SOKOL_PRIVATE void _sg_wgpu_discard_shader_func(_sg_wgpu_shader_func_t* func) {
    if (func->module) {
        wgpuShaderModuleRelease(func->module);
        func->module = 0;
    }
}

typedef struct { uint8_t sokol_slot, wgpu_slot; } _sg_wgpu_dynoffset_mapping_t;

_SOKOL_PRIVATE int _sg_wgpu_dynoffset_cmp(const void* a, const void* b) {
    const _sg_wgpu_dynoffset_mapping_t* aa = (const _sg_wgpu_dynoffset_mapping_t*)a;
    const _sg_wgpu_dynoffset_mapping_t* bb = (const _sg_wgpu_dynoffset_mapping_t*)b;
    if (aa->wgpu_slot < bb->wgpu_slot) return -1;
    else if (aa->wgpu_slot > bb->wgpu_slot) return 1;
    return 0;
}

// NOTE: this is an out-of-range check for WGSL bindslots that's also active in release mode
_SOKOL_PRIVATE bool _sg_wgpu_ensure_wgsl_bindslot_ranges(const sg_shader_desc* desc) {
    SOKOL_ASSERT(desc);
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        const sg_shader_uniform_block* ub = &desc->uniform_blocks[i];
        if (ub->stage != SG_SHADERSTAGE_NONE) {
            if (ub->wgsl_group0_binding_n >= _SG_WGPU_MAX_UB_BINDGROUP_WGSL_SLOTS) {
                _SG_ERROR(WGPU_UNIFORMBLOCK_WGSL_GROUP0_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const sg_shader_view* view = &desc->views[i];
        if (view->texture.stage != SG_SHADERSTAGE_NONE) {
            if (view->texture.wgsl_group1_binding_n >= _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_WGSL_SLOTS) {
                _SG_ERROR(WGPU_TEXTURE_WGSL_GROUP1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_buffer.wgsl_group1_binding_n >= _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_WGSL_SLOTS) {
                _SG_ERROR(WGPU_STORAGEBUFFER_WGSL_GROUP1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_image.wgsl_group1_binding_n >= _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_WGSL_SLOTS) {
                _SG_ERROR(WGPU_STORAGEIMAGE_WGSL_GROUP1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        const sg_shader_sampler* smp = &desc->samplers[i];
        if (smp->stage != SG_SHADERSTAGE_NONE) {
            if (smp->wgsl_group1_binding_n >= _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_WGSL_SLOTS) {
                _SG_ERROR(WGPU_SAMPLER_WGSL_GROUP1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
    }
    return true;
}

_SOKOL_PRIVATE sg_resource_state _sg_wgpu_create_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
    SOKOL_ASSERT(shd && desc);
    SOKOL_ASSERT(shd->wgpu.vertex_func.module == 0);
    SOKOL_ASSERT(shd->wgpu.fragment_func.module == 0);
    SOKOL_ASSERT(shd->wgpu.compute_func.module == 0);
    SOKOL_ASSERT(shd->wgpu.bgl_ub == 0);
    SOKOL_ASSERT(shd->wgpu.bg_ub == 0);
    SOKOL_ASSERT(shd->wgpu.bgl_view_smp == 0);
    // do a release-mode bounds-check on wgsl bindslots; even though out-of-range
    // bindslots can't cause out-of-bounds accesses in the wgpu backend, the
    // check is performed to be consistent with the other backends
    if (!_sg_wgpu_ensure_wgsl_bindslot_ranges(desc)) {
        return SG_RESOURCESTATE_FAILED;
    }
    // build shader modules
    bool shd_valid = true;
    if (desc->vertex_func.source) {
        shd->wgpu.vertex_func = _sg_wgpu_create_shader_func(&desc->vertex_func, desc->label);
        shd_valid &= shd->wgpu.vertex_func.module != 0;
    }
    if (desc->fragment_func.source) {
        shd->wgpu.fragment_func = _sg_wgpu_create_shader_func(&desc->fragment_func, desc->label);
        shd_valid &= shd->wgpu.fragment_func.module != 0;
    }
    if (desc->compute_func.source) {
        shd->wgpu.compute_func = _sg_wgpu_create_shader_func(&desc->compute_func, desc->label);
        shd_valid &= shd->wgpu.compute_func.module != 0;
    }
    if (!shd_valid) {
        _sg_wgpu_discard_shader_func(&shd->wgpu.vertex_func);
        _sg_wgpu_discard_shader_func(&shd->wgpu.fragment_func);
        _sg_wgpu_discard_shader_func(&shd->wgpu.compute_func);
        return SG_RESOURCESTATE_FAILED;
    }
    // create bind group layout and bind group for uniform blocks
    // NOTE: also need to create a mapping of sokol ub bind slots to array indices
    // for the dynamic offsets array in the setBindGroup call
    SOKOL_ASSERT(_SG_WGPU_MAX_UB_BINDGROUP_ENTRIES <= _SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES);
    _SG_STRUCT(WGPUBindGroupLayoutEntry, bgl_entries[_SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES]);
    _SG_STRUCT(WGPUBindGroupLayoutDescriptor, bgl_desc);
    _SG_STRUCT(WGPUBindGroupEntry, bg_entries[_SG_WGPU_MAX_VIEW_SMP_BINDGROUP_ENTRIES]);
    _SG_STRUCT(WGPUBindGroupDescriptor, bg_desc);
    _SG_STRUCT(_sg_wgpu_dynoffset_mapping_t, dynoffset_map[SG_MAX_UNIFORMBLOCK_BINDSLOTS]);
    size_t bgl_index = 0;
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        if (shd->cmn.uniform_blocks[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        shd->wgpu.ub_grp0_bnd_n[i] = desc->uniform_blocks[i].wgsl_group0_binding_n;
        WGPUBindGroupEntry* bg_entry = &bg_entries[bgl_index];
        WGPUBindGroupLayoutEntry* bgl_entry = &bgl_entries[bgl_index];
        bgl_entry->binding = shd->wgpu.ub_grp0_bnd_n[i];
        bgl_entry->visibility = _sg_wgpu_shader_stage(shd->cmn.uniform_blocks[i].stage);
        bgl_entry->buffer.type = WGPUBufferBindingType_Uniform;
        bgl_entry->buffer.hasDynamicOffset = true;
        bg_entry->binding = bgl_entry->binding;
        bg_entry->buffer = _sg.wgpu.uniform.buf;
        bg_entry->size = _SG_WGPU_MAX_UNIFORM_UPDATE_SIZE;
        dynoffset_map[bgl_index].sokol_slot = (uint8_t)i;
        dynoffset_map[bgl_index].wgpu_slot = (uint8_t)bgl_entry->binding;
        bgl_index += 1;
    }
    bgl_desc.entryCount = bgl_index;
    bgl_desc.entries = bgl_entries;
    shd->wgpu.bgl_ub = wgpuDeviceCreateBindGroupLayout(_sg.wgpu.dev, &bgl_desc);
    SOKOL_ASSERT(shd->wgpu.bgl_ub);
    bg_desc.layout = shd->wgpu.bgl_ub;
    bg_desc.entryCount = bgl_index;
    bg_desc.entries = bg_entries;
    shd->wgpu.bg_ub = wgpuDeviceCreateBindGroup(_sg.wgpu.dev, &bg_desc);
    SOKOL_ASSERT(shd->wgpu.bg_ub);
    // sort the dynoffset_map by wgpu bindings, this is because the
    // dynamic offsets of the WebGPU setBindGroup call must be in
    // 'binding order', not 'bindgroup entry order'
    qsort(dynoffset_map, bgl_index, sizeof(_sg_wgpu_dynoffset_mapping_t), _sg_wgpu_dynoffset_cmp);
    shd->wgpu.ub_num_dynoffsets = (uint8_t)bgl_index;
    for (uint8_t i = 0; i < bgl_index; i++) {
        const uint8_t sokol_slot = dynoffset_map[i].sokol_slot;
        shd->wgpu.ub_dynoffsets[sokol_slot] = i;
    }
    // create bind group layout for textures, storage buffers/images and samplers
    _sg_clear(bgl_entries, sizeof(bgl_entries));
    _sg_clear(&bgl_desc, sizeof(bgl_desc));
    bgl_index = 0;
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        WGPUBindGroupLayoutEntry* bgl_entry = &bgl_entries[bgl_index];
        bgl_entry->visibility = _sg_wgpu_shader_stage(shd->cmn.views[i].stage);
        if (shd->cmn.views[i].view_type == SG_VIEWTYPE_TEXTURE) {
            shd->wgpu.view_grp1_bnd_n[i] = desc->views[i].texture.wgsl_group1_binding_n;
            const bool msaa = shd->cmn.views[i].multisampled;
            bgl_entry->texture.viewDimension = _sg_wgpu_texture_view_dimension(shd->cmn.views[i].image_type);
            bgl_entry->texture.sampleType = _sg_wgpu_texture_sample_type(shd->cmn.views[i].sample_type, msaa);
            bgl_entry->texture.multisampled = msaa;
        } else if (shd->cmn.views[i].view_type == SG_VIEWTYPE_STORAGEBUFFER) {
            shd->wgpu.view_grp1_bnd_n[i] = desc->views[i].storage_buffer.wgsl_group1_binding_n;
            if (shd->cmn.views[i].sbuf_readonly) {
                bgl_entry->buffer.type = WGPUBufferBindingType_ReadOnlyStorage;
            } else {
                bgl_entry->buffer.type = WGPUBufferBindingType_Storage;
            }
  16404. } else if (shd->cmn.views[i].view_type == SG_VIEWTYPE_STORAGEIMAGE) {
  16405. shd->wgpu.view_grp1_bnd_n[i] = desc->views[i].storage_image.wgsl_group1_binding_n;
  16406. if (shd->cmn.views[i].simg_writeonly) {
  16407. bgl_entry->storageTexture.access = WGPUStorageTextureAccess_WriteOnly;
  16408. } else {
  16409. bgl_entry->storageTexture.access = WGPUStorageTextureAccess_ReadWrite;
  16410. }
  16411. bgl_entry->storageTexture.format = _sg_wgpu_textureformat(shd->cmn.views[i].access_format);
            bgl_entry->storageTexture.viewDimension = _sg_wgpu_texture_view_dimension(shd->cmn.views[i].image_type);
        } else {
            SOKOL_UNREACHABLE;
        }
        bgl_entry->binding = shd->wgpu.view_grp1_bnd_n[i];
        bgl_index += 1;
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        if (shd->cmn.samplers[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        shd->wgpu.smp_grp1_bnd_n[i] = desc->samplers[i].wgsl_group1_binding_n;
        WGPUBindGroupLayoutEntry* bgl_entry = &bgl_entries[bgl_index];
        bgl_entry->binding = shd->wgpu.smp_grp1_bnd_n[i];
        bgl_entry->visibility = _sg_wgpu_shader_stage(shd->cmn.samplers[i].stage);
        bgl_entry->sampler.type = _sg_wgpu_sampler_binding_type(shd->cmn.samplers[i].sampler_type);
        bgl_index += 1;
    }
    bgl_desc.entryCount = bgl_index;
    bgl_desc.entries = bgl_entries;
    shd->wgpu.bgl_view_smp = wgpuDeviceCreateBindGroupLayout(_sg.wgpu.dev, &bgl_desc);
    if (shd->wgpu.bgl_view_smp == 0) {
        _SG_ERROR(WGPU_SHADER_CREATE_BINDGROUP_LAYOUT_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    return SG_RESOURCESTATE_VALID;
}

_SOKOL_PRIVATE void _sg_wgpu_discard_shader(_sg_shader_t* shd) {
    SOKOL_ASSERT(shd);
    _sg_wgpu_discard_shader_func(&shd->wgpu.vertex_func);
    _sg_wgpu_discard_shader_func(&shd->wgpu.fragment_func);
    _sg_wgpu_discard_shader_func(&shd->wgpu.compute_func);
    if (shd->wgpu.bgl_ub) {
        wgpuBindGroupLayoutRelease(shd->wgpu.bgl_ub);
        shd->wgpu.bgl_ub = 0;
    }
    if (shd->wgpu.bg_ub) {
        wgpuBindGroupRelease(shd->wgpu.bg_ub);
        shd->wgpu.bg_ub = 0;
    }
    if (shd->wgpu.bgl_view_smp) {
        wgpuBindGroupLayoutRelease(shd->wgpu.bgl_view_smp);
        shd->wgpu.bgl_view_smp = 0;
    }
}

_SOKOL_PRIVATE sg_resource_state _sg_wgpu_create_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(pip && desc);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    SOKOL_ASSERT(shd->wgpu.bgl_ub);
    SOKOL_ASSERT(shd->wgpu.bgl_view_smp);
    pip->wgpu.blend_color.r = (double) desc->blend_color.r;
    pip->wgpu.blend_color.g = (double) desc->blend_color.g;
    pip->wgpu.blend_color.b = (double) desc->blend_color.b;
    pip->wgpu.blend_color.a = (double) desc->blend_color.a;
    // - @group(0) for uniform blocks
    // - @group(1) for all image, sampler and storagebuffer resources
    size_t num_bgls = 2;
    _SG_STRUCT(WGPUBindGroupLayout, wgpu_bgl[_SG_WGPU_MAX_BINDGROUPS]);
    wgpu_bgl[_SG_WGPU_UB_BINDGROUP_INDEX      ] = shd->wgpu.bgl_ub;
    wgpu_bgl[_SG_WGPU_VIEW_SMP_BINDGROUP_INDEX] = shd->wgpu.bgl_view_smp;
    _SG_STRUCT(WGPUPipelineLayoutDescriptor, wgpu_pl_desc);
    wgpu_pl_desc.bindGroupLayoutCount = num_bgls;
    wgpu_pl_desc.bindGroupLayouts = &wgpu_bgl[0];
    const WGPUPipelineLayout wgpu_pip_layout = wgpuDeviceCreatePipelineLayout(_sg.wgpu.dev, &wgpu_pl_desc);
    if (0 == wgpu_pip_layout) {
        _SG_ERROR(WGPU_CREATE_PIPELINE_LAYOUT_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    SOKOL_ASSERT(wgpu_pip_layout);
    if (pip->cmn.is_compute) {
        _SG_STRUCT(WGPUComputePipelineDescriptor, wgpu_pip_desc);
        wgpu_pip_desc.label = _sg_wgpu_stringview(desc->label);
        wgpu_pip_desc.layout = wgpu_pip_layout;
        wgpu_pip_desc.compute.module = shd->wgpu.compute_func.module;
        wgpu_pip_desc.compute.entryPoint = _sg_wgpu_stringview(shd->wgpu.compute_func.entry.buf);
        pip->wgpu.cpip = wgpuDeviceCreateComputePipeline(_sg.wgpu.dev, &wgpu_pip_desc);
        wgpuPipelineLayoutRelease(wgpu_pip_layout);
        if (0 == pip->wgpu.cpip) {
            _SG_ERROR(WGPU_CREATE_COMPUTE_PIPELINE_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
    } else {
        _SG_STRUCT(WGPUVertexBufferLayout, wgpu_vb_layouts[SG_MAX_VERTEXBUFFER_BINDSLOTS]);
        _SG_STRUCT(WGPUVertexAttribute, wgpu_vtx_attrs[SG_MAX_VERTEXBUFFER_BINDSLOTS][SG_MAX_VERTEX_ATTRIBUTES]);
        int wgpu_vb_num = 0;
        for (int vb_idx = 0; vb_idx < SG_MAX_VERTEXBUFFER_BINDSLOTS; vb_idx++, wgpu_vb_num++) {
            const sg_vertex_buffer_layout_state* vbl_state = &desc->layout.buffers[vb_idx];
            if (0 == vbl_state->stride) {
                break;
            }
            wgpu_vb_layouts[vb_idx].arrayStride = (uint64_t)vbl_state->stride;
            wgpu_vb_layouts[vb_idx].stepMode = _sg_wgpu_stepmode(vbl_state->step_func);
            wgpu_vb_layouts[vb_idx].attributes = &wgpu_vtx_attrs[vb_idx][0];
        }
        for (int va_idx = 0; va_idx < SG_MAX_VERTEX_ATTRIBUTES; va_idx++) {
            const sg_vertex_attr_state* va_state = &desc->layout.attrs[va_idx];
            if (SG_VERTEXFORMAT_INVALID == va_state->format) {
                break;
            }
            const int vb_idx = va_state->buffer_index;
            SOKOL_ASSERT(vb_idx < SG_MAX_VERTEXBUFFER_BINDSLOTS);
            SOKOL_ASSERT(pip->cmn.vertex_buffer_layout_active[vb_idx]);
            const size_t wgpu_attr_idx = wgpu_vb_layouts[vb_idx].attributeCount;
            wgpu_vb_layouts[vb_idx].attributeCount += 1;
            wgpu_vtx_attrs[vb_idx][wgpu_attr_idx].format = _sg_wgpu_vertexformat(va_state->format);
            wgpu_vtx_attrs[vb_idx][wgpu_attr_idx].offset = (uint64_t)va_state->offset;
            wgpu_vtx_attrs[vb_idx][wgpu_attr_idx].shaderLocation = (uint32_t)va_idx;
        }
        _SG_STRUCT(WGPURenderPipelineDescriptor, wgpu_pip_desc);
        _SG_STRUCT(WGPUDepthStencilState, wgpu_ds_state);
        _SG_STRUCT(WGPUFragmentState, wgpu_frag_state);
        _SG_STRUCT(WGPUColorTargetState, wgpu_ctgt_state[SG_MAX_COLOR_ATTACHMENTS]);
        _SG_STRUCT(WGPUBlendState, wgpu_blend_state[SG_MAX_COLOR_ATTACHMENTS]);
        wgpu_pip_desc.label = _sg_wgpu_stringview(desc->label);
        wgpu_pip_desc.layout = wgpu_pip_layout;
        wgpu_pip_desc.vertex.module = shd->wgpu.vertex_func.module;
        wgpu_pip_desc.vertex.entryPoint = _sg_wgpu_stringview(shd->wgpu.vertex_func.entry.buf);
        wgpu_pip_desc.vertex.bufferCount = (size_t)wgpu_vb_num;
        wgpu_pip_desc.vertex.buffers = &wgpu_vb_layouts[0];
        wgpu_pip_desc.primitive.topology = _sg_wgpu_topology(desc->primitive_type);
        wgpu_pip_desc.primitive.stripIndexFormat = _sg_wgpu_stripindexformat(desc->primitive_type, desc->index_type);
        wgpu_pip_desc.primitive.frontFace = _sg_wgpu_frontface(desc->face_winding);
        wgpu_pip_desc.primitive.cullMode = _sg_wgpu_cullmode(desc->cull_mode);
        if (SG_PIXELFORMAT_NONE != desc->depth.pixel_format) {
            wgpu_ds_state.format = _sg_wgpu_textureformat(desc->depth.pixel_format);
            wgpu_ds_state.depthWriteEnabled = _sg_wgpu_optional_bool(desc->depth.write_enabled);
            wgpu_ds_state.depthCompare = _sg_wgpu_comparefunc(desc->depth.compare);
            wgpu_ds_state.stencilFront.compare = _sg_wgpu_comparefunc(desc->stencil.front.compare);
            wgpu_ds_state.stencilFront.failOp = _sg_wgpu_stencilop(desc->stencil.front.fail_op);
            wgpu_ds_state.stencilFront.depthFailOp = _sg_wgpu_stencilop(desc->stencil.front.depth_fail_op);
            wgpu_ds_state.stencilFront.passOp = _sg_wgpu_stencilop(desc->stencil.front.pass_op);
            wgpu_ds_state.stencilBack.compare = _sg_wgpu_comparefunc(desc->stencil.back.compare);
            wgpu_ds_state.stencilBack.failOp = _sg_wgpu_stencilop(desc->stencil.back.fail_op);
            wgpu_ds_state.stencilBack.depthFailOp = _sg_wgpu_stencilop(desc->stencil.back.depth_fail_op);
            wgpu_ds_state.stencilBack.passOp = _sg_wgpu_stencilop(desc->stencil.back.pass_op);
            wgpu_ds_state.stencilReadMask = desc->stencil.read_mask;
            wgpu_ds_state.stencilWriteMask = desc->stencil.write_mask;
            wgpu_ds_state.depthBias = (int32_t)desc->depth.bias;
            wgpu_ds_state.depthBiasSlopeScale = desc->depth.bias_slope_scale;
            wgpu_ds_state.depthBiasClamp = desc->depth.bias_clamp;
            wgpu_pip_desc.depthStencil = &wgpu_ds_state;
        }
        wgpu_pip_desc.multisample.count = (uint32_t)desc->sample_count;
        wgpu_pip_desc.multisample.mask = 0xFFFFFFFF;
        wgpu_pip_desc.multisample.alphaToCoverageEnabled = desc->alpha_to_coverage_enabled;
        if (desc->color_count > 0) {
            wgpu_frag_state.module = shd->wgpu.fragment_func.module;
            wgpu_frag_state.entryPoint = _sg_wgpu_stringview(shd->wgpu.fragment_func.entry.buf);
            wgpu_frag_state.targetCount = (size_t)desc->color_count;
            wgpu_frag_state.targets = &wgpu_ctgt_state[0];
            for (int i = 0; i < desc->color_count; i++) {
                SOKOL_ASSERT(i < SG_MAX_COLOR_ATTACHMENTS);
                wgpu_ctgt_state[i].format = _sg_wgpu_textureformat(desc->colors[i].pixel_format);
                wgpu_ctgt_state[i].writeMask = _sg_wgpu_colorwritemask(desc->colors[i].write_mask);
                if (desc->colors[i].blend.enabled) {
                    wgpu_ctgt_state[i].blend = &wgpu_blend_state[i];
                    wgpu_blend_state[i].color.operation = _sg_wgpu_blendop(desc->colors[i].blend.op_rgb);
                    wgpu_blend_state[i].color.srcFactor = _sg_wgpu_blendfactor(desc->colors[i].blend.src_factor_rgb);
                    wgpu_blend_state[i].color.dstFactor = _sg_wgpu_blendfactor(desc->colors[i].blend.dst_factor_rgb);
                    wgpu_blend_state[i].alpha.operation = _sg_wgpu_blendop(desc->colors[i].blend.op_alpha);
                    wgpu_blend_state[i].alpha.srcFactor = _sg_wgpu_blendfactor(desc->colors[i].blend.src_factor_alpha);
                    wgpu_blend_state[i].alpha.dstFactor = _sg_wgpu_blendfactor(desc->colors[i].blend.dst_factor_alpha);
                }
            }
            wgpu_pip_desc.fragment = &wgpu_frag_state;
        }
        pip->wgpu.rpip = wgpuDeviceCreateRenderPipeline(_sg.wgpu.dev, &wgpu_pip_desc);
        wgpuPipelineLayoutRelease(wgpu_pip_layout);
        if (0 == pip->wgpu.rpip) {
            _SG_ERROR(WGPU_CREATE_RENDER_PIPELINE_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
    }
    return SG_RESOURCESTATE_VALID;
}
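
// An illustrative (hand-written, not generated) WGSL fragment matching the
// bind group convention used in _sg_wgpu_create_pipeline() above - uniform
// blocks live in @group(0), texture/sampler/storage resources in @group(1),
// with binding numbers supplied through sg_shader_desc (hypothetical values):
//
//      @group(0) @binding(0) var<uniform> params: Params;
//      @group(1) @binding(0) var tex: texture_2d<f32>;
//      @group(1) @binding(1) var smp: sampler;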

_SOKOL_PRIVATE void _sg_wgpu_discard_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    _sg_wgpu_bindgroups_cache_invalidate(_SG_WGPU_BINDGROUPSCACHEITEMTYPE_PIPELINE, &pip->slot);
    if (pip->wgpu.rpip) {
        wgpuRenderPipelineRelease(pip->wgpu.rpip);
        pip->wgpu.rpip = 0;
    }
    if (pip->wgpu.cpip) {
        wgpuComputePipelineRelease(pip->wgpu.cpip);
        pip->wgpu.cpip = 0;
    }
}

_SOKOL_PRIVATE sg_resource_state _sg_wgpu_create_view(_sg_view_t* view, const sg_view_desc* desc) {
    SOKOL_ASSERT(view && desc);
    if (view->cmn.type != SG_VIEWTYPE_STORAGEBUFFER) {
        const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
        SOKOL_ASSERT(img->wgpu.tex);
        SOKOL_ASSERT(view->cmn.img.mip_level_count >= 1);
        SOKOL_ASSERT(view->cmn.img.slice_count >= 1);
        _SG_STRUCT(WGPUTextureViewDescriptor, wgpu_texview_desc);
        wgpu_texview_desc.label = _sg_wgpu_stringview(desc->label);
        wgpu_texview_desc.baseMipLevel = (uint32_t)view->cmn.img.mip_level;
        wgpu_texview_desc.mipLevelCount = (uint32_t)view->cmn.img.mip_level_count;
        wgpu_texview_desc.baseArrayLayer = (uint32_t)view->cmn.img.slice;
        wgpu_texview_desc.arrayLayerCount = (uint32_t)view->cmn.img.slice_count;
        if (view->cmn.type == SG_VIEWTYPE_TEXTURE) {
            wgpu_texview_desc.dimension = _sg_wgpu_texture_view_dimension(img->cmn.type);
        } else {
            wgpu_texview_desc.dimension = _sg_wgpu_attachment_view_dimension(img->cmn.type);
        }
        if (view->cmn.type == SG_VIEWTYPE_DEPTHSTENCILATTACHMENT) {
            wgpu_texview_desc.aspect = WGPUTextureAspect_All;
        } else if (_sg_is_depth_or_depth_stencil_format(img->cmn.pixel_format)) {
            wgpu_texview_desc.aspect = WGPUTextureAspect_DepthOnly;
        } else {
            wgpu_texview_desc.aspect = WGPUTextureAspect_All;
        }
        view->wgpu.view = wgpuTextureCreateView(img->wgpu.tex, &wgpu_texview_desc);
        if (0 == view->wgpu.view) {
            _SG_ERROR(WGPU_CREATE_TEXTURE_VIEW_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
    }
    return SG_RESOURCESTATE_VALID;
}

_SOKOL_PRIVATE void _sg_wgpu_discard_view(_sg_view_t* view) {
    SOKOL_ASSERT(view);
    _sg_wgpu_bindgroups_cache_invalidate(_SG_WGPU_BINDGROUPSCACHEITEMTYPE_VIEW, &view->slot);
    if (view->wgpu.view) {
        wgpuTextureViewRelease(view->wgpu.view);
        view->wgpu.view = 0;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_init_color_att(WGPURenderPassColorAttachment* wgpu_att, const sg_color_attachment_action* action, WGPUTextureView color_view, WGPUTextureView resolve_view) {
    wgpu_att->depthSlice = WGPU_DEPTH_SLICE_UNDEFINED;
    wgpu_att->view = color_view;
    wgpu_att->resolveTarget = resolve_view;
    wgpu_att->loadOp = _sg_wgpu_load_op(color_view, action->load_action);
    wgpu_att->storeOp = _sg_wgpu_store_op(color_view, action->store_action);
    wgpu_att->clearValue.r = action->clear_value.r;
    wgpu_att->clearValue.g = action->clear_value.g;
    wgpu_att->clearValue.b = action->clear_value.b;
    wgpu_att->clearValue.a = action->clear_value.a;
}

_SOKOL_PRIVATE void _sg_wgpu_init_ds_att(WGPURenderPassDepthStencilAttachment* wgpu_att, const sg_pass_action* action, sg_pixel_format fmt, WGPUTextureView view) {
    wgpu_att->view = view;
    wgpu_att->depthLoadOp = _sg_wgpu_load_op(view, action->depth.load_action);
    wgpu_att->depthStoreOp = _sg_wgpu_store_op(view, action->depth.store_action);
    wgpu_att->depthClearValue = action->depth.clear_value;
    wgpu_att->depthReadOnly = false;
    if (_sg_is_depth_stencil_format(fmt)) {
        wgpu_att->stencilLoadOp = _sg_wgpu_load_op(view, action->stencil.load_action);
        wgpu_att->stencilStoreOp = _sg_wgpu_store_op(view, action->stencil.store_action);
    } else {
        wgpu_att->stencilLoadOp = WGPULoadOp_Undefined;
        wgpu_att->stencilStoreOp = WGPUStoreOp_Undefined;
    }
    wgpu_att->stencilClearValue = action->stencil.clear_value;
    wgpu_att->stencilReadOnly = false;
}

_SOKOL_PRIVATE void _sg_wgpu_begin_compute_pass(const sg_pass* pass) {
    _SG_STRUCT(WGPUComputePassDescriptor, wgpu_pass_desc);
    wgpu_pass_desc.label = _sg_wgpu_stringview(pass->label);
    _sg.wgpu.cpass_enc = wgpuCommandEncoderBeginComputePass(_sg.wgpu.cmd_enc, &wgpu_pass_desc);
    SOKOL_ASSERT(_sg.wgpu.cpass_enc);
    // clear initial bindings
    wgpuComputePassEncoderSetBindGroup(_sg.wgpu.cpass_enc, _SG_WGPU_UB_BINDGROUP_INDEX, 0, 0, 0);
    wgpuComputePassEncoderSetBindGroup(_sg.wgpu.cpass_enc, _SG_WGPU_VIEW_SMP_BINDGROUP_INDEX, 0, 0, 0);
    _sg_stats_inc(wgpu.bindings.num_set_bindgroup);
}

_SOKOL_PRIVATE void _sg_wgpu_begin_render_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    const sg_swapchain* swapchain = &pass->swapchain;
    const sg_pass_action* action = &pass->action;
    _SG_STRUCT(WGPURenderPassDescriptor, wgpu_pass_desc);
    _SG_STRUCT(WGPURenderPassColorAttachment, wgpu_color_att[SG_MAX_COLOR_ATTACHMENTS]);
    _SG_STRUCT(WGPURenderPassDepthStencilAttachment, wgpu_ds_att);
    wgpu_pass_desc.label = _sg_wgpu_stringview(pass->label);
    if (!atts->empty) {
        SOKOL_ASSERT(atts->num_color_views <= SG_MAX_COLOR_ATTACHMENTS);
        for (int i = 0; i < atts->num_color_views; i++) {
            SOKOL_ASSERT(atts->color_views[i]);
            WGPUTextureView wgpu_color_view = atts->color_views[i]->wgpu.view;
            WGPUTextureView wgpu_resolve_view = 0;
            if (atts->resolve_views[i]) {
                wgpu_resolve_view = atts->resolve_views[i]->wgpu.view;
            }
            _sg_wgpu_init_color_att(&wgpu_color_att[i], &action->colors[i], wgpu_color_view, wgpu_resolve_view);
        }
        wgpu_pass_desc.colorAttachmentCount = (size_t)atts->num_color_views;
        wgpu_pass_desc.colorAttachments = &wgpu_color_att[0];
        if (atts->ds_view) {
            const _sg_image_t* img = _sg_image_ref_ptr(&atts->ds_view->cmn.img.ref);
            WGPUTextureView wgpu_ds_view = atts->ds_view->wgpu.view;
            SOKOL_ASSERT(wgpu_ds_view);
            _sg_wgpu_init_ds_att(&wgpu_ds_att, action, img->cmn.pixel_format, wgpu_ds_view);
            wgpu_pass_desc.depthStencilAttachment = &wgpu_ds_att;
        }
    } else {
        WGPUTextureView wgpu_color_view = (WGPUTextureView) swapchain->wgpu.render_view;
        WGPUTextureView wgpu_resolve_view = (WGPUTextureView) swapchain->wgpu.resolve_view;
        WGPUTextureView wgpu_depth_stencil_view = (WGPUTextureView) swapchain->wgpu.depth_stencil_view;
        _sg_wgpu_init_color_att(&wgpu_color_att[0], &action->colors[0], wgpu_color_view, wgpu_resolve_view);
        wgpu_pass_desc.colorAttachmentCount = 1;
        wgpu_pass_desc.colorAttachments = &wgpu_color_att[0];
        if (wgpu_depth_stencil_view) {
            SOKOL_ASSERT(swapchain->depth_format > SG_PIXELFORMAT_NONE);
            _sg_wgpu_init_ds_att(&wgpu_ds_att, action, swapchain->depth_format, wgpu_depth_stencil_view);
            wgpu_pass_desc.depthStencilAttachment = &wgpu_ds_att;
        }
    }
    _sg.wgpu.rpass_enc = wgpuCommandEncoderBeginRenderPass(_sg.wgpu.cmd_enc, &wgpu_pass_desc);
    SOKOL_ASSERT(_sg.wgpu.rpass_enc);
    wgpuRenderPassEncoderSetBindGroup(_sg.wgpu.rpass_enc, _SG_WGPU_UB_BINDGROUP_INDEX, 0, 0, 0);
    wgpuRenderPassEncoderSetBindGroup(_sg.wgpu.rpass_enc, _SG_WGPU_VIEW_SMP_BINDGROUP_INDEX, 0, 0, 0);
    _sg_stats_inc(wgpu.bindings.num_set_bindgroup);
}

_SOKOL_PRIVATE void _sg_wgpu_begin_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(pass && atts);
    SOKOL_ASSERT(_sg.wgpu.dev);
    SOKOL_ASSERT(0 == _sg.wgpu.rpass_enc);
    SOKOL_ASSERT(0 == _sg.wgpu.cpass_enc);
    // first pass in the frame? create command encoder
    if (0 == _sg.wgpu.cmd_enc) {
        _SG_STRUCT(WGPUCommandEncoderDescriptor, cmd_enc_desc);
        _sg.wgpu.cmd_enc = wgpuDeviceCreateCommandEncoder(_sg.wgpu.dev, &cmd_enc_desc);
        SOKOL_ASSERT(_sg.wgpu.cmd_enc);
    }
    _sg_wgpu_bindings_cache_clear();
    if (pass->compute) {
        _sg_wgpu_begin_compute_pass(pass);
    } else {
        _sg_wgpu_begin_render_pass(pass, atts);
    }
}

_SOKOL_PRIVATE void _sg_wgpu_end_pass(const _sg_attachments_ptrs_t* atts) {
    _SOKOL_UNUSED(atts);
    if (_sg.wgpu.rpass_enc) {
        wgpuRenderPassEncoderEnd(_sg.wgpu.rpass_enc);
        wgpuRenderPassEncoderRelease(_sg.wgpu.rpass_enc);
        _sg.wgpu.rpass_enc = 0;
    }
    if (_sg.wgpu.cpass_enc) {
        wgpuComputePassEncoderEnd(_sg.wgpu.cpass_enc);
        wgpuComputePassEncoderRelease(_sg.wgpu.cpass_enc);
        _sg.wgpu.cpass_enc = 0;
    }
}

_SOKOL_PRIVATE void _sg_wgpu_commit(void) {
    SOKOL_ASSERT(_sg.wgpu.cmd_enc);
    _sg_wgpu_uniform_system_on_commit();
    _SG_STRUCT(WGPUCommandBufferDescriptor, cmd_buf_desc);
    WGPUCommandBuffer wgpu_cmd_buf = wgpuCommandEncoderFinish(_sg.wgpu.cmd_enc, &cmd_buf_desc);
    SOKOL_ASSERT(wgpu_cmd_buf);
    wgpuCommandEncoderRelease(_sg.wgpu.cmd_enc);
    _sg.wgpu.cmd_enc = 0;
    wgpuQueueSubmit(_sg.wgpu.queue, 1, &wgpu_cmd_buf);
    wgpuCommandBufferRelease(wgpu_cmd_buf);
}

_SOKOL_PRIVATE void _sg_wgpu_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
    SOKOL_ASSERT(_sg.wgpu.rpass_enc);
    float xf = (float) x;
    float yf = (float) (origin_top_left ? y : (_sg.cur_pass.dim.height - (y + h)));
    float wf = (float) w;
    float hf = (float) h;
    wgpuRenderPassEncoderSetViewport(_sg.wgpu.rpass_enc, xf, yf, wf, hf, 0.0f, 1.0f);
}

_SOKOL_PRIVATE void _sg_wgpu_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
    SOKOL_ASSERT(_sg.wgpu.rpass_enc);
    const _sg_recti_t clip = _sg_clipi(x, y, w, h, _sg.cur_pass.dim.width, _sg.cur_pass.dim.height);
    uint32_t sx = (uint32_t) clip.x;
    uint32_t sy = (uint32_t) (origin_top_left ? clip.y : (_sg.cur_pass.dim.height - (clip.y + clip.h)));
    uint32_t sw = (uint32_t) clip.w;
    uint32_t sh = (uint32_t) clip.h;
    wgpuRenderPassEncoderSetScissorRect(_sg.wgpu.rpass_enc, sx, sy, sw, sh);
}

_SOKOL_PRIVATE void _sg_wgpu_apply_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    _sg_wgpu_uniform_system_on_apply_pipeline();
    if (pip->cmn.is_compute) {
        SOKOL_ASSERT(_sg.cur_pass.is_compute);
        SOKOL_ASSERT(pip->wgpu.cpip);
        SOKOL_ASSERT(_sg.wgpu.cpass_enc);
        wgpuComputePassEncoderSetPipeline(_sg.wgpu.cpass_enc, pip->wgpu.cpip);
    } else {
        SOKOL_ASSERT(!_sg.cur_pass.is_compute);
        SOKOL_ASSERT(pip->wgpu.rpip);
        SOKOL_ASSERT(_sg.wgpu.rpass_enc);
        wgpuRenderPassEncoderSetPipeline(_sg.wgpu.rpass_enc, pip->wgpu.rpip);
        wgpuRenderPassEncoderSetBlendConstant(_sg.wgpu.rpass_enc, &pip->wgpu.blend_color);
        wgpuRenderPassEncoderSetStencilReference(_sg.wgpu.rpass_enc, pip->cmn.stencil.ref);
    }
}

_SOKOL_PRIVATE bool _sg_wgpu_apply_bindings(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(bnd);
    bool retval = true;
    if (!_sg.cur_pass.is_compute) {
        retval &= _sg_wgpu_apply_index_buffer(bnd);
        retval &= _sg_wgpu_apply_vertex_buffers(bnd);
    }
    retval &= _sg_wgpu_apply_bindings_bindgroup(bnd);
    return retval;
}

_SOKOL_PRIVATE void _sg_wgpu_apply_uniforms(int ub_slot, const sg_range* data) {
    const uint32_t alignment = _sg.wgpu.limits.minUniformBufferOffsetAlignment;
    SOKOL_ASSERT(_sg.wgpu.uniform.staging);
    SOKOL_ASSERT((ub_slot >= 0) && (ub_slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS));
    SOKOL_ASSERT((_sg.wgpu.uniform.offset + data->size) <= _sg.wgpu.uniform.num_bytes);
    SOKOL_ASSERT((_sg.wgpu.uniform.offset & (alignment - 1)) == 0);
    SOKOL_ASSERT(data->size <= _SG_WGPU_MAX_UNIFORM_UPDATE_SIZE);
    _sg_stats_inc(wgpu.uniforms.num_set_bindgroup);
    memcpy(_sg.wgpu.uniform.staging + _sg.wgpu.uniform.offset, data->ptr, data->size);
    _sg.wgpu.uniform.bind_offsets[ub_slot] = _sg.wgpu.uniform.offset;
    _sg.wgpu.uniform.offset = _sg_roundup_u32(_sg.wgpu.uniform.offset + (uint32_t)data->size, alignment);
    _sg.wgpu.uniform.dirty = true;
}
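
// Worked example for the offset bookkeeping in _sg_wgpu_apply_uniforms()
// (hypothetical numbers): with minUniformBufferOffsetAlignment = 256, a
// 20-byte update arriving at offset 512 is copied to staging[512],
// bind_offsets[ub_slot] is set to 512, and the write cursor advances to
// _sg_roundup_u32(532, 256) == 768; the recorded offset is applied later
// as a dynamic offset in the setBindGroup call.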

_SOKOL_PRIVATE void _sg_wgpu_draw(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
    SOKOL_ASSERT(_sg.wgpu.rpass_enc);
    if (_sg.wgpu.uniform.dirty) {
        _sg_wgpu_uniform_system_set_bindgroup();
    }
    if (_sg.use_indexed_draw) {
        wgpuRenderPassEncoderDrawIndexed(_sg.wgpu.rpass_enc,
            (uint32_t)num_elements,
            (uint32_t)num_instances,
            (uint32_t)base_element,
            (int32_t)base_vertex,
            (uint32_t)base_instance);
    } else {
        wgpuRenderPassEncoderDraw(_sg.wgpu.rpass_enc,
            (uint32_t)num_elements,
            (uint32_t)num_instances,
            (uint32_t)base_element,
            (uint32_t)base_instance);
    }
}

_SOKOL_PRIVATE void _sg_wgpu_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    SOKOL_ASSERT(_sg.wgpu.cpass_enc);
    if (_sg.wgpu.uniform.dirty) {
        _sg_wgpu_uniform_system_set_bindgroup();
    }
    wgpuComputePassEncoderDispatchWorkgroups(_sg.wgpu.cpass_enc,
        (uint32_t)num_groups_x,
        (uint32_t)num_groups_y,
        (uint32_t)num_groups_z);
}

_SOKOL_PRIVATE void _sg_wgpu_update_buffer(_sg_buffer_t* buf, const sg_range* data) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    _sg_wgpu_copy_buffer_data(buf, 0, data);
}

_SOKOL_PRIVATE void _sg_wgpu_append_buffer(_sg_buffer_t* buf, const sg_range* data, bool new_frame) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    _SOKOL_UNUSED(new_frame);
    _sg_wgpu_copy_buffer_data(buf, (uint64_t)buf->cmn.append_pos, data);
}

_SOKOL_PRIVATE void _sg_wgpu_update_image(_sg_image_t* img, const sg_image_data* data) {
    SOKOL_ASSERT(img && data);
    _sg_wgpu_copy_image_data(img, data);
}

// ██ ██ ██ ██ ██ ██ ██ █████ ███ ██ ██████ █████ ██████ ██ ██ ███████ ███ ██ ██████
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██
// ██ ██ ██ ██ ██ █████ ███████ ██ ██ ██ ██████ ███████ ██ █████ █████ ██ ██ ██ ██ ██
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ████ ██████ ███████ ██ ██ ██ ██ ██ ████ ██████ ██ ██ ██████ ██ ██ ███████ ██ ████ ██████
//
// >>vulkan
// >>vk
#elif defined(SOKOL_VULKAN)
_SOKOL_PRIVATE void _sg_vk_set_object_label(VkObjectType obj_type, uint64_t obj_handle, const char* label) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(obj_handle != 0);
    if (label) {
        // FIXME: use vkSetDebugUtilsObjectNameEXT
        _SOKOL_UNUSED(obj_type && obj_handle && label);
    }
}

_SOKOL_PRIVATE bool _sg_vk_is_read_access(_sg_vk_access_t access) {
    _sg_vk_access_t read_bits =
        _SG_VK_ACCESS_VERTEXBUFFER |
        _SG_VK_ACCESS_INDEXBUFFER |
        _SG_VK_ACCESS_STORAGEBUFFER_RO |
        _SG_VK_ACCESS_TEXTURE |
        _SG_VK_ACCESS_PRESENT;
    return 0 == (access & ~read_bits);
}
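
// Example for the mask test above: an access value of
// (_SG_VK_ACCESS_TEXTURE|_SG_VK_ACCESS_VERTEXBUFFER) has no bits outside
// read_bits, so it counts as read-only; any value containing a write bit
// (e.g. _SG_VK_ACCESS_STORAGEBUFFER_RW) fails the '0 == (access & ~read_bits)'
// test. This drives the read->read barrier elision further below.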

_SOKOL_PRIVATE VkPipelineStageFlags2 _sg_vk_stage_mask(_sg_vk_access_t access, bool is_dst_access) {
    access &= ~_SG_VK_ACCESS_DISCARD;
    if (is_dst_access) {
        SOKOL_ASSERT(access != _SG_VK_ACCESS_NONE);
    }
    VkPipelineStageFlags2 f = 0;
    if (access == _SG_VK_ACCESS_NONE) {
        return VK_PIPELINE_STAGE_2_NONE;
    }
    if (access & _SG_VK_ACCESS_PRESENT) {
        return VK_PIPELINE_STAGE_2_NONE;
    }
    if (access & _SG_VK_ACCESS_STAGING) {
        f |= VK_PIPELINE_STAGE_2_COPY_BIT;
    }
    if (access & _SG_VK_ACCESS_VERTEXBUFFER) {
        f |= VK_PIPELINE_STAGE_2_VERTEX_ATTRIBUTE_INPUT_BIT;
    }
    if (access & _SG_VK_ACCESS_INDEXBUFFER) {
        f |= VK_PIPELINE_STAGE_2_INDEX_INPUT_BIT;
    }
    if (access & (_SG_VK_ACCESS_STORAGEBUFFER_RO|_SG_VK_ACCESS_TEXTURE)) {
        f |= VK_PIPELINE_STAGE_2_VERTEX_SHADER_BIT |
             VK_PIPELINE_STAGE_2_FRAGMENT_SHADER_BIT |
             VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
    }
    if (access & _SG_VK_ACCESS_STORAGEBUFFER_RW) {
        f |= VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
    }
    if (access & _SG_VK_ACCESS_STORAGEIMAGE) {
        f |= VK_PIPELINE_STAGE_2_COMPUTE_SHADER_BIT;
    }
    if (access & _SG_VK_ACCESS_COLOR_ATTACHMENT) {
        f |= VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
    }
    if (access & _SG_VK_ACCESS_RESOLVE_ATTACHMENT) {
        f |= VK_PIPELINE_STAGE_2_COLOR_ATTACHMENT_OUTPUT_BIT;
    }
    if (access & (_SG_VK_ACCESS_DEPTH_ATTACHMENT|_SG_VK_ACCESS_STENCIL_ATTACHMENT)) {
        f |= VK_PIPELINE_STAGE_2_EARLY_FRAGMENT_TESTS_BIT|VK_PIPELINE_STAGE_2_LATE_FRAGMENT_TESTS_BIT;
    }
    SOKOL_ASSERT(f != 0);
    return f;
}

// return pipeline stages on the 'before' side of a barrier
_SOKOL_PRIVATE VkPipelineStageFlags2 _sg_vk_src_stage_mask(_sg_vk_access_t access) {
    return _sg_vk_stage_mask(access, false);
}

// return pipeline stages on the 'after' side of a barrier
_SOKOL_PRIVATE VkPipelineStageFlags2 _sg_vk_dst_stage_mask(_sg_vk_access_t access) {
    return _sg_vk_stage_mask(access, true);
}

_SOKOL_PRIVATE VkAccessFlags2 _sg_vk_access_mask(_sg_vk_access_t access, bool is_dst_access) {
    access &= ~_SG_VK_ACCESS_DISCARD;
    if (access == _SG_VK_ACCESS_NONE) {
        return VK_ACCESS_2_NONE;
    }
    if (access & _SG_VK_ACCESS_PRESENT) {
        return VK_ACCESS_2_NONE;
    }
    VkAccessFlags2 f = VK_ACCESS_2_NONE;
    if (is_dst_access) {
        // NOTE: read bits don't make sense for src-mask
        if (access & _SG_VK_ACCESS_VERTEXBUFFER) {
            f |= VK_ACCESS_2_VERTEX_ATTRIBUTE_READ_BIT;
        }
        if (access & _SG_VK_ACCESS_INDEXBUFFER) {
            f |= VK_ACCESS_2_INDEX_READ_BIT;
        }
        if (access & _SG_VK_ACCESS_STORAGEBUFFER_RO) {
            f |= VK_ACCESS_2_SHADER_STORAGE_READ_BIT;
        }
        if (access & _SG_VK_ACCESS_TEXTURE) {
            f |= VK_ACCESS_2_SHADER_SAMPLED_READ_BIT;
        }
    }
    if (access & _SG_VK_ACCESS_STAGING) {
        f |= VK_ACCESS_2_TRANSFER_WRITE_BIT;
    }
    if (access & _SG_VK_ACCESS_STORAGEBUFFER_RW) {
        f |= VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT | VK_ACCESS_2_SHADER_STORAGE_READ_BIT;
    }
    if (access & _SG_VK_ACCESS_STORAGEIMAGE) {
        f |= VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT | VK_ACCESS_2_SHADER_STORAGE_READ_BIT;
    }
    if (access & _SG_VK_ACCESS_COLOR_ATTACHMENT) {
        f |= VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
    }
    if (access & _SG_VK_ACCESS_RESOLVE_ATTACHMENT) {
        f |= VK_ACCESS_2_COLOR_ATTACHMENT_WRITE_BIT;
    }
    if (access & (_SG_VK_ACCESS_DEPTH_ATTACHMENT | _SG_VK_ACCESS_STENCIL_ATTACHMENT)) {
        f |= VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        if (is_dst_access) {
            f |= VK_ACCESS_2_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
        }
    }
    return f;
}

_SOKOL_PRIVATE VkAccessFlags2 _sg_vk_src_access_mask(_sg_vk_access_t access) {
    return _sg_vk_access_mask(access, false);
}

_SOKOL_PRIVATE VkAccessFlags2 _sg_vk_dst_access_mask(_sg_vk_access_t access) {
    return _sg_vk_access_mask(access, true);
}

_SOKOL_PRIVATE VkImageLayout _sg_vk_image_layout(_sg_vk_access_t access) {
    // NOTE: "image layout transitions with VK_IMAGE_LAYOUT_UNDEFINED allow
    // the implementation to discard the image subresource range"
    if (access & _SG_VK_ACCESS_DISCARD) {
        return VK_IMAGE_LAYOUT_UNDEFINED;
    }
    switch (access) {
        case _SG_VK_ACCESS_NONE:
            return VK_IMAGE_LAYOUT_UNDEFINED;
        case _SG_VK_ACCESS_STAGING:
            return VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
        case _SG_VK_ACCESS_TEXTURE:
            return VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        case _SG_VK_ACCESS_STORAGEIMAGE:
            return VK_IMAGE_LAYOUT_GENERAL;
        case _SG_VK_ACCESS_COLOR_ATTACHMENT:
        case _SG_VK_ACCESS_RESOLVE_ATTACHMENT:
        case _SG_VK_ACCESS_DEPTH_ATTACHMENT:
        case _SG_VK_ACCESS_DEPTH_ATTACHMENT|_SG_VK_ACCESS_STENCIL_ATTACHMENT:
            return VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL;
        case _SG_VK_ACCESS_PRESENT:
            return VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
        default:
            SOKOL_UNREACHABLE;
            return VK_IMAGE_LAYOUT_UNDEFINED;
    }
}
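
// Example for the _SG_VK_ACCESS_DISCARD special case above: when a render
// pass clears a color attachment, the begin-pass code further below ORs the
// DISCARD bit into the image's current access, the barrier's oldLayout then
// becomes VK_IMAGE_LAYOUT_UNDEFINED, and the driver is free to skip loading
// the previous image content.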

_SOKOL_PRIVATE void _sg_vk_swapchain_beginpass_barrier(VkCommandBuffer cmd_buf, VkImage vkimg, _sg_vk_access_t pass_access) {
    SOKOL_ASSERT(cmd_buf);
    _SG_STRUCT(VkImageMemoryBarrier2, barrier);
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2;
    barrier.srcStageMask = _sg_vk_src_stage_mask(pass_access);
    barrier.srcAccessMask = _sg_vk_src_access_mask(pass_access);
    barrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    barrier.dstStageMask = _sg_vk_dst_stage_mask(pass_access);
    barrier.dstAccessMask = _sg_vk_dst_access_mask(pass_access);
    barrier.newLayout = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL;
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = vkimg;
    if (0 != (pass_access & (_SG_VK_ACCESS_DEPTH_ATTACHMENT|_SG_VK_ACCESS_STENCIL_ATTACHMENT))) {
        barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
        if (0 != (pass_access & _SG_VK_ACCESS_STENCIL_ATTACHMENT)) {
            barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    } else {
        barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    }
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.layerCount = 1;
    _SG_STRUCT(VkDependencyInfo, dep_info);
    dep_info.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dep_info.imageMemoryBarrierCount = 1;
    dep_info.pImageMemoryBarriers = &barrier;
    vkCmdPipelineBarrier2(cmd_buf, &dep_info);
    _sg_stats_inc(vk.num_cmd_pipeline_barrier);
}

_SOKOL_PRIVATE void _sg_vk_swapchain_endpass_barrier(VkCommandBuffer cmd_buf, VkImage vkimg, _sg_vk_access_t pass_access, bool present) {
    SOKOL_ASSERT(cmd_buf);
    _SG_STRUCT(VkImageMemoryBarrier2, barrier);
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2;
    barrier.srcStageMask = _sg_vk_src_stage_mask(pass_access);
    barrier.srcAccessMask = _sg_vk_src_access_mask(pass_access);
    barrier.oldLayout = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL;
    barrier.dstStageMask = VK_PIPELINE_STAGE_2_NONE;
    barrier.dstAccessMask = VK_ACCESS_2_NONE;
    if (present) {
        barrier.newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    } else {
        barrier.newLayout = VK_IMAGE_LAYOUT_ATTACHMENT_OPTIMAL;
    }
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = vkimg;
    if (0 != (pass_access & (_SG_VK_ACCESS_DEPTH_ATTACHMENT|_SG_VK_ACCESS_STENCIL_ATTACHMENT))) {
        barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
        if (0 != (pass_access & _SG_VK_ACCESS_STENCIL_ATTACHMENT)) {
            barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    } else {
        barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    }
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.layerCount = 1;
    _SG_STRUCT(VkDependencyInfo, dep_info);
    dep_info.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dep_info.imageMemoryBarrierCount = 1;
    dep_info.pImageMemoryBarriers = &barrier;
    vkCmdPipelineBarrier2(cmd_buf, &dep_info);
    _sg_stats_inc(vk.num_cmd_pipeline_barrier);
}

_SOKOL_PRIVATE void _sg_vk_image_barrier(VkCommandBuffer cmd_buf, _sg_image_t* img, _sg_vk_access_t new_access) {
    SOKOL_ASSERT(cmd_buf && img && img->vk.img);
    if (_sg_vk_is_read_access(img->vk.cur_access) && _sg_vk_is_read_access(new_access)) {
        return;
    }
    _SG_STRUCT(VkImageMemoryBarrier2, barrier);
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER_2;
    barrier.srcStageMask = _sg_vk_src_stage_mask(img->vk.cur_access);
    barrier.srcAccessMask = _sg_vk_src_access_mask(img->vk.cur_access);
    barrier.oldLayout = _sg_vk_image_layout(img->vk.cur_access);
    barrier.dstStageMask = _sg_vk_dst_stage_mask(new_access);
    barrier.dstAccessMask = _sg_vk_dst_access_mask(new_access);
    barrier.newLayout = _sg_vk_image_layout(new_access);
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.image = img->vk.img;
    if (_sg_is_depth_or_depth_stencil_format(img->cmn.pixel_format)) {
        barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_DEPTH_BIT;
        if (_sg_is_depth_stencil_format(img->cmn.pixel_format)) {
            barrier.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    } else {
        barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    }
    barrier.subresourceRange.levelCount = VK_REMAINING_MIP_LEVELS;
    barrier.subresourceRange.layerCount = VK_REMAINING_ARRAY_LAYERS;
    _SG_STRUCT(VkDependencyInfo, dep_info);
    dep_info.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dep_info.imageMemoryBarrierCount = 1;
    dep_info.pImageMemoryBarriers = &barrier;
    vkCmdPipelineBarrier2(cmd_buf, &dep_info);
    _sg_stats_inc(vk.num_cmd_pipeline_barrier);
    img->vk.cur_access = new_access;
}

_SOKOL_PRIVATE void _sg_vk_buffer_barrier(VkCommandBuffer cmd_buf, _sg_buffer_t* buf, _sg_vk_access_t new_access) {
    SOKOL_ASSERT(cmd_buf && buf && buf->vk.buf);
    if (_sg_vk_is_read_access(buf->vk.cur_access) && _sg_vk_is_read_access(new_access)) {
        return;
    }
    _SG_STRUCT(VkBufferMemoryBarrier2, barrier);
    barrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER_2;
    barrier.srcStageMask = _sg_vk_src_stage_mask(buf->vk.cur_access);
    barrier.srcAccessMask = _sg_vk_src_access_mask(buf->vk.cur_access);
    barrier.dstStageMask = _sg_vk_dst_stage_mask(new_access);
    barrier.dstAccessMask = _sg_vk_dst_access_mask(new_access);
    barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
    barrier.buffer = buf->vk.buf;
    barrier.offset = 0;
    barrier.size = VK_WHOLE_SIZE;
    _SG_STRUCT(VkDependencyInfo, dep_info);
    dep_info.sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO;
    dep_info.bufferMemoryBarrierCount = 1;
    dep_info.pBufferMemoryBarriers = &barrier;
    vkCmdPipelineBarrier2(cmd_buf, &dep_info);
    _sg_stats_inc(vk.num_cmd_pipeline_barrier);
    buf->vk.cur_access = new_access;
}
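
// NOTE on the read->read early-out in the two barrier functions above: for
// instance, a buffer currently in _SG_VK_ACCESS_VERTEXBUFFER access that is
// next bound as a readonly storage buffer needs no barrier (both accesses
// are reads), while any transition into or out of a write access (staging
// copy, storage read/write, attachment) records a full vkCmdPipelineBarrier2.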

_SOKOL_PRIVATE void _sg_vk_barrier_on_begin_pass(VkCommandBuffer cmd_buf, const sg_pass* pass, const _sg_attachments_ptrs_t* atts, bool is_compute_pass) {
    SOKOL_ASSERT(cmd_buf);
    if (is_compute_pass) {
        SOKOL_ASSERT(0 == _sg.vk.track.buffers.cur_slot);
        SOKOL_ASSERT(0 == _sg.vk.track.images.cur_slot);
    } else {
        const bool is_swapchain_pass = atts->empty;
        if (is_swapchain_pass) {
            const sg_vulkan_swapchain* vk_swapchain = &pass->swapchain.vulkan;
            SOKOL_ASSERT(vk_swapchain->render_image);
            VkImage vk_color_image = (VkImage)vk_swapchain->render_image;
            _sg_vk_swapchain_beginpass_barrier(cmd_buf, vk_color_image, _SG_VK_ACCESS_COLOR_ATTACHMENT);
            if (_sg.cur_pass.swapchain.sample_count > 1) {
                VkImage vk_resolve_image = (VkImage)vk_swapchain->resolve_image;
                SOKOL_ASSERT(vk_resolve_image);
                _sg_vk_swapchain_beginpass_barrier(cmd_buf, vk_resolve_image, _SG_VK_ACCESS_RESOLVE_ATTACHMENT);
            }
            if (vk_swapchain->depth_stencil_image) {
                VkImage vk_ds_image = (VkImage)vk_swapchain->depth_stencil_image;
                const bool has_stencil = _sg_is_depth_stencil_format(_sg.cur_pass.swapchain.depth_fmt);
                _sg_vk_access_t access = _SG_VK_ACCESS_DEPTH_ATTACHMENT;
                if (has_stencil) {
                    access |= _SG_VK_ACCESS_STENCIL_ATTACHMENT;
                }
                _sg_vk_swapchain_beginpass_barrier(cmd_buf, vk_ds_image, access);
            }
        } else {
            SOKOL_ASSERT(atts->num_color_views <= SG_MAX_COLOR_ATTACHMENTS);
            for (int i = 0; i < atts->num_color_views; i++) {
                SOKOL_ASSERT(atts->color_views[i]);
                _sg_image_t* color_image = _sg_image_ref_ptr(&atts->color_views[i]->cmn.img.ref);
                if (pass->action.colors[i].load_action != SG_LOADACTION_LOAD) {
                    // don't need to preserve image content for clear and dontcare
                    color_image->vk.cur_access |= _SG_VK_ACCESS_DISCARD;
                }
                _sg_vk_image_barrier(cmd_buf, color_image, _SG_VK_ACCESS_COLOR_ATTACHMENT);
                if (atts->resolve_views[i]) {
                    _sg_image_t* resolve_image = _sg_image_ref_ptr(&atts->resolve_views[i]->cmn.img.ref);
                    // never need to preserve content for resolve image
                    resolve_image->vk.cur_access |= _SG_VK_ACCESS_DISCARD;
                    _sg_vk_image_barrier(cmd_buf, resolve_image, _SG_VK_ACCESS_RESOLVE_ATTACHMENT);
                }
            }
            if (atts->ds_view) {
                _sg_image_t* ds_image = _sg_image_ref_ptr(&atts->ds_view->cmn.img.ref);
                const bool has_stencil = _sg_is_depth_stencil_format(ds_image->cmn.pixel_format);
                if ((pass->action.depth.load_action != SG_LOADACTION_LOAD) &&
                    (pass->action.stencil.load_action != SG_LOADACTION_LOAD))
                {
                    // don't need to preserve image content for clear and dontcare
                    ds_image->vk.cur_access |= _SG_VK_ACCESS_DISCARD;
                }
                _sg_vk_access_t dst_access = _SG_VK_ACCESS_DEPTH_ATTACHMENT;
                if (has_stencil) {
                    dst_access |= _SG_VK_ACCESS_STENCIL_ATTACHMENT;
                }
                _sg_vk_image_barrier(cmd_buf, ds_image, dst_access);
            }
        }
    }
}

_SOKOL_PRIVATE void _sg_vk_barrier_on_apply_bindings(VkCommandBuffer cmd_buf, const _sg_bindings_ptrs_t* bnd, bool is_compute_pass) {
    SOKOL_ASSERT(bnd);
    if (is_compute_pass) {
        SOKOL_ASSERT(bnd->pip);
        for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
            const _sg_view_t* view = bnd->views[i];
            if (0 == view) {
                continue;
            } else if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
                const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd->pip->cmn.shader);
                _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
                _sg_vk_access_t new_access = shd->cmn.views[i].sbuf_readonly
                    ? _SG_VK_ACCESS_STORAGEBUFFER_RO
                    : _SG_VK_ACCESS_STORAGEBUFFER_RW;
                _sg_vk_buffer_barrier(cmd_buf, buf, new_access);
                _sg_track_add(&_sg.vk.track.buffers, buf->slot.id);
            } else if (view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE) {
                _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_STORAGEIMAGE);
                _sg_track_add(&_sg.vk.track.images, img->slot.id);
            } else if (view->cmn.type == SG_VIEWTYPE_TEXTURE) {
                _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_TEXTURE);
                _sg_track_add(&_sg.vk.track.images, img->slot.id);
            } else {
                SOKOL_UNREACHABLE;
            }
        }
    } else {
        // no transitions allowed in render passes, but check that resources
        // are in the correct access state
        for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
            if (bnd->vbs[i]) {
                SOKOL_ASSERT(0 != (bnd->vbs[i]->vk.cur_access & _SG_VK_ACCESS_VERTEXBUFFER));
            }
        }
        if (bnd->ib) {
            SOKOL_ASSERT(0 != (bnd->ib->vk.cur_access & _SG_VK_ACCESS_INDEXBUFFER));
        }
        for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
            const _sg_view_t* view = bnd->views[i];
            if (0 == view) {
                continue;
            } else if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
                const _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
                _SOKOL_UNUSED(buf);
                SOKOL_ASSERT(0 != (buf->vk.cur_access & _SG_VK_ACCESS_STORAGEBUFFER_RO));
            } else if (view->cmn.type == SG_VIEWTYPE_TEXTURE) {
                const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                _SOKOL_UNUSED(img);
                SOKOL_ASSERT(0 != (img->vk.cur_access & _SG_VK_ACCESS_TEXTURE));
            } else {
                SOKOL_UNREACHABLE;
            }
        }
    }
}

_SOKOL_PRIVATE void _sg_vk_barrier_on_end_pass(VkCommandBuffer cmd_buf, const _sg_attachments_ptrs_t* atts, bool is_compute_pass) {
    SOKOL_ASSERT(cmd_buf);
    if (is_compute_pass) {
        // transition all tracked buffers into vertex+index+sbuf-ro access
        const _sg_vk_access_t new_buf_access = _SG_VK_ACCESS_VERTEXBUFFER|_SG_VK_ACCESS_INDEXBUFFER|_SG_VK_ACCESS_STORAGEBUFFER_RO;
        for (int i = 0; i < _sg.vk.track.buffers.cur_slot; i++) {
            const uint32_t buf_id = _sg.vk.track.buffers.slots[i];
            _sg_buffer_t* buf = _sg_lookup_buffer(buf_id);
            if (buf) {
                _sg_vk_buffer_barrier(cmd_buf, buf, new_buf_access);
            }
        }
        _sg_track_reset(&_sg.vk.track.buffers);
        // transition all tracked images into texture access
        const _sg_vk_access_t new_img_access = _SG_VK_ACCESS_TEXTURE;
        for (int i = 0; i < _sg.vk.track.images.cur_slot; i++) {
            const uint32_t img_id = _sg.vk.track.images.slots[i];
            _sg_image_t* img = _sg_lookup_image(img_id);
            if (img) {
                _sg_vk_image_barrier(cmd_buf, img, new_img_access);
            }
        }
        _sg_track_reset(&_sg.vk.track.images);
    } else {
        const bool is_swapchain_pass = atts->empty;
        if (is_swapchain_pass) {
            SOKOL_ASSERT(_sg.vk.swapchain.render_image);
            VkImage present_image = _sg.vk.swapchain.resolve_image
                ? (VkImage)_sg.vk.swapchain.resolve_image
                : (VkImage)_sg.vk.swapchain.render_image;
            _sg_vk_swapchain_endpass_barrier(cmd_buf, present_image, _SG_VK_ACCESS_COLOR_ATTACHMENT, true);
        } else {
            for (int i = 0; i < atts->num_color_views; i++) {
                if (_sg.cur_pass.action.colors[i].store_action == SG_STOREACTION_STORE) {
                    SOKOL_ASSERT(atts->color_views[i]);
                    _sg_image_t* img = _sg_image_ref_ptr(&atts->color_views[i]->cmn.img.ref);
                    _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_TEXTURE);
                }
                if (atts->resolve_views[i]) {
                    _sg_image_t* img = _sg_image_ref_ptr(&atts->resolve_views[i]->cmn.img.ref);
                    _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_TEXTURE);
                }
            }
            if (atts->ds_view) {
                _sg_image_t* img = _sg_image_ref_ptr(&atts->ds_view->cmn.img.ref);
                if (_sg.cur_pass.action.depth.store_action == SG_STOREACTION_STORE) {
                    _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_TEXTURE);
                }
            }
        }
    }
}
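
// Illustrative frame structure enabled by the end-pass transitions above: a
// compute pass writes a storage buffer, then a render pass binds the same
// buffer as a vertex buffer; because the compute pass end transitions all
// tracked buffers into VERTEXBUFFER|INDEXBUFFER|STORAGEBUFFER_RO access, the
// render pass can consume the data without any further barrier calls.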

_SOKOL_PRIVATE int _sg_vk_mem_find_memory_type_index(uint32_t type_filter, VkMemoryPropertyFlags props) {
    SOKOL_ASSERT(_sg.vk.phys_dev);
    _SG_STRUCT(VkPhysicalDeviceMemoryProperties, mem_props);
    vkGetPhysicalDeviceMemoryProperties(_sg.vk.phys_dev, &mem_props);
    for (uint32_t i = 0; i < mem_props.memoryTypeCount; i++) {
        if ((type_filter & (1 << i)) && ((mem_props.memoryTypes[i].propertyFlags & props) == props)) {
            return (int)i;
        }
    }
    return -1;
}
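
// Example (hypothetical values): with mem_reqs->memoryTypeBits == 0b0110 only
// memory types 1 and 2 are acceptable for the resource; if type 1 lacks the
// requested HOST_VISIBLE|HOST_COHERENT properties but type 2 has them, the
// function above returns 2.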

_SOKOL_PRIVATE VkDeviceMemory _sg_vk_mem_alloc_device_memory(_sg_vk_memtype_t mem_type, const VkMemoryRequirements* mem_reqs) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(mem_reqs);
    VkMemoryPropertyFlags mem_prop_flags = 0;
    VkMemoryAllocateFlags mem_alloc_flags = 0;
    switch (mem_type) {
        case _SG_VK_MEMTYPE_GENERIC_BUFFER:
            mem_prop_flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            break;
        case _SG_VK_MEMTYPE_STORAGE_BUFFER:
            mem_prop_flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            mem_alloc_flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
            break;
        case _SG_VK_MEMTYPE_IMAGE:
            mem_prop_flags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
            break;
        case _SG_VK_MEMTYPE_STAGING_COPY:
            mem_prop_flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
            break;
        case _SG_VK_MEMTYPE_STAGING_STREAM:
            mem_prop_flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
            break;
        case _SG_VK_MEMTYPE_UNIFORMS:
            mem_prop_flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
            mem_alloc_flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
            break;
        case _SG_VK_MEMTYPE_DESCRIPTORS:
            mem_prop_flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
            mem_alloc_flags = VK_MEMORY_ALLOCATE_DEVICE_ADDRESS_BIT;
            break;
        default:
            SOKOL_UNREACHABLE;
            break;
    }
    int mem_type_index = _sg_vk_mem_find_memory_type_index(mem_reqs->memoryTypeBits, mem_prop_flags);
    if (-1 == mem_type_index) {
        _SG_ERROR(VULKAN_ALLOC_DEVICE_MEMORY_NO_SUITABLE_MEMORY_TYPE);
        return 0;
    }
    _SG_STRUCT(VkMemoryAllocateFlagsInfo, flags_info);
    flags_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_FLAGS_INFO;
    flags_info.flags = mem_alloc_flags;
    _SG_STRUCT(VkMemoryAllocateInfo, alloc_info);
    alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
    alloc_info.pNext = &flags_info;
    alloc_info.allocationSize = mem_reqs->size;
    alloc_info.memoryTypeIndex = (uint32_t) mem_type_index;
    VkDeviceMemory vk_dev_mem = 0;
    VkResult res = vkAllocateMemory(_sg.vk.dev, &alloc_info, 0, &vk_dev_mem);
    _sg_stats_inc(vk.num_allocate_memory);
    _sg_stats_add(vk.size_allocate_memory, mem_reqs->size);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_ALLOCATE_MEMORY_FAILED);
        return 0;
    }
    SOKOL_ASSERT(vk_dev_mem);
    return vk_dev_mem;
}

_SOKOL_PRIVATE void _sg_vk_mem_free_device_memory(VkDeviceMemory vk_dev_mem) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(vk_dev_mem);
    vkFreeMemory(_sg.vk.dev, vk_dev_mem, 0);
    _sg_stats_inc(vk.num_free_memory);
}

_SOKOL_PRIVATE bool _sg_vk_mem_alloc_buffer_device_memory(_sg_buffer_t* buf) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(buf);
    SOKOL_ASSERT(buf->vk.buf);
    SOKOL_ASSERT(0 == buf->vk.mem);
    _SG_STRUCT(VkMemoryRequirements, mem_reqs);
    vkGetBufferMemoryRequirements(_sg.vk.dev, buf->vk.buf, &mem_reqs);
    _sg_vk_memtype_t mem_type = buf->cmn.usage.storage_buffer
        ? _SG_VK_MEMTYPE_STORAGE_BUFFER
        : _SG_VK_MEMTYPE_GENERIC_BUFFER;
    buf->vk.mem = _sg_vk_mem_alloc_device_memory(mem_type, &mem_reqs);
    if (0 == buf->vk.mem) {
        _SG_ERROR(VULKAN_ALLOC_BUFFER_DEVICE_MEMORY_FAILED);
        return false;
    }
    return true;
}

_SOKOL_PRIVATE bool _sg_vk_mem_alloc_image_device_memory(_sg_image_t* img) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(img);
    SOKOL_ASSERT(img->vk.img);
    SOKOL_ASSERT(0 == img->vk.mem);
    _SG_STRUCT(VkMemoryRequirements, mem_reqs);
    vkGetImageMemoryRequirements(_sg.vk.dev, img->vk.img, &mem_reqs);
    img->vk.mem = _sg_vk_mem_alloc_device_memory(_SG_VK_MEMTYPE_IMAGE, &mem_reqs);
    if (0 == img->vk.mem) {
        _SG_ERROR(VULKAN_ALLOC_IMAGE_DEVICE_MEMORY_FAILED);
        return false;
    }
    return true;
}

_SOKOL_PRIVATE void _sg_vk_create_delete_queues(void) {
    const uint32_t num_items = (uint32_t)
        (2 * _sg.desc.buffer_pool_size +
         2 * _sg.desc.image_pool_size +
         1 * _sg.desc.sampler_pool_size +
         5 * _sg.desc.shader_pool_size +
         2 * _sg.desc.pipeline_pool_size +
         1 * _sg.desc.view_pool_size +
         256);
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        _sg_vk_delete_queue_t* queue = &_sg.vk.frame.slot[i].delete_queue;
        SOKOL_ASSERT(0 == queue->items);
        SOKOL_ASSERT(0 == queue->index);
        queue->num = num_items;
        const size_t pool_size = num_items * sizeof(_sg_vk_delete_queue_item_t);
        queue->items = (_sg_vk_delete_queue_item_t*)_sg_malloc(pool_size);
    }
}

_SOKOL_PRIVATE void _sg_vk_delete_queue_collect_items(_sg_vk_delete_queue_t* queue) {
    SOKOL_ASSERT(queue && queue->items);
    for (uint32_t i = 0; i < queue->index; i++) {
        _sg_vk_delete_queue_item_t* item = &queue->items[i];
        SOKOL_ASSERT(item->destructor && item->obj);
        item->destructor(item->obj);
        item->destructor = 0;
        item->obj = 0;
    }
    _sg_stats_add(vk.num_delete_queue_collected, queue->index);
    queue->index = 0;
}

_SOKOL_PRIVATE void _sg_vk_destroy_delete_queues(void) {
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        _sg_vk_delete_queue_t* queue = &_sg.vk.frame.slot[i].delete_queue;
        SOKOL_ASSERT(queue->items);
        _sg_vk_delete_queue_collect_items(queue);
        _sg_free(queue->items);
        SOKOL_ASSERT(queue->index == 0);
        queue->items = 0;
        queue->num = 0;
    }
}

_SOKOL_PRIVATE _sg_vk_delete_queue_t* _sg_vk_cur_delete_queue(void) {
    return &_sg.vk.frame.slot[_sg.vk.frame_slot].delete_queue;
}

_SOKOL_PRIVATE void _sg_vk_delete_queue_collect(void) {
    _sg_vk_delete_queue_t* queue = _sg_vk_cur_delete_queue();
    _sg_vk_delete_queue_collect_items(queue);
}

_SOKOL_PRIVATE void _sg_vk_delete_queue_add(_sg_vk_delete_queue_destructor_t destructor, void* obj) {
    SOKOL_ASSERT(destructor && obj);
    _sg_vk_delete_queue_t* queue = _sg_vk_cur_delete_queue();
    SOKOL_ASSERT(queue->items);
    if (queue->index >= queue->num) {
        _SG_PANIC(VULKAN_DELETE_QUEUE_EXHAUSTED);
    }
    queue->items[queue->index].destructor = destructor;
    queue->items[queue->index].obj = obj;
    queue->index += 1;
    _sg_stats_inc(vk.num_delete_queue_added);
}
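
// Illustrative deferred-destruction timeline for the delete queues above
// (with SG_NUM_INFLIGHT_FRAMES == 2): an object queued while frame slot 0 is
// current is only destroyed when slot 0 is acquired again two frames later,
// at which point the GPU can no longer be referencing it.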
// double-buffered system for any non-blocking CPU => GPU data transfers
_SOKOL_PRIVATE void _sg_vk_shared_buffer_init(_sg_vk_shared_buffer_t* shbuf, uint32_t size, uint32_t align, _sg_vk_memtype_t mem_type, const char* label) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(shbuf && (size > 0) && (align > 0));
    SOKOL_ASSERT(0 == shbuf->size);
    SOKOL_ASSERT(0 == shbuf->offset);
    SOKOL_ASSERT(0 == shbuf->cur_buf);
    SOKOL_ASSERT(false == shbuf->overflown);
    VkResult res;
    VkBufferUsageFlags vk_usage = 0;
    bool want_device_address = false;
    switch (mem_type) {
        case _SG_VK_MEMTYPE_STAGING_STREAM:
            vk_usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
            break;
        case _SG_VK_MEMTYPE_UNIFORMS:
            vk_usage = VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
            vk_usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
            want_device_address = true;
            break;
        case _SG_VK_MEMTYPE_DESCRIPTORS:
            vk_usage = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT;
            vk_usage |= VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT;
            vk_usage |= VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
            want_device_address = true;
            break;
        default:
            SOKOL_UNREACHABLE;
            break;
    }
    shbuf->size = _sg_roundup_u32(size, align);
    shbuf->align = align;
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        SOKOL_ASSERT(0 == shbuf->slots[i].buf);
        SOKOL_ASSERT(0 == shbuf->slots[i].mem);
        SOKOL_ASSERT(0 == shbuf->slots[i].mem_ptr);
        _SG_STRUCT(VkBufferCreateInfo, buf_create_info);
        buf_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        buf_create_info.size = shbuf->size;
        buf_create_info.usage = vk_usage;
        buf_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        res = vkCreateBuffer(_sg.vk.dev, &buf_create_info, 0, &shbuf->slots[i].buf);
        if (res != VK_SUCCESS) {
            _SG_PANIC(VULKAN_CREATE_SHARED_BUFFER_FAILED);
        }
        SOKOL_ASSERT(shbuf->slots[i].buf);
        _sg_vk_set_object_label(VK_OBJECT_TYPE_BUFFER, (uint64_t)shbuf->slots[i].buf, label);
        _SG_STRUCT(VkMemoryRequirements, mem_reqs);
        vkGetBufferMemoryRequirements(_sg.vk.dev, shbuf->slots[i].buf, &mem_reqs);
        shbuf->slots[i].mem = _sg_vk_mem_alloc_device_memory(mem_type, &mem_reqs);
        if (0 == shbuf->slots[i].mem) {
            _SG_PANIC(VULKAN_ALLOCATE_SHARED_BUFFER_MEMORY_FAILED);
        }
        res = vkBindBufferMemory(_sg.vk.dev, shbuf->slots[i].buf, shbuf->slots[i].mem, 0);
        if (res != VK_SUCCESS) {
            _SG_PANIC(VULKAN_BIND_SHARED_BUFFER_MEMORY_FAILED);
        }
        if (want_device_address) {
            _SG_STRUCT(VkBufferDeviceAddressInfo, addr_info);
            addr_info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
            addr_info.buffer = shbuf->slots[i].buf;
            shbuf->slots[i].dev_addr = vkGetBufferDeviceAddress(_sg.vk.dev, &addr_info);
            SOKOL_ASSERT(shbuf->slots[i].dev_addr);
        }
        res = vkMapMemory(_sg.vk.dev, shbuf->slots[i].mem, 0, VK_WHOLE_SIZE, 0, &shbuf->slots[i].mem_ptr);
        if (res != VK_SUCCESS) {
            _SG_PANIC(VULKAN_MAP_SHARED_BUFFER_MEMORY_FAILED);
        }
        SOKOL_ASSERT(shbuf->slots[i].mem_ptr);
    }
}
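// NOTE: the per-slot memory remains persistently mapped for the lifetime of
// the shared buffer; this assumes a host-visible and host-coherent memory
// type, so no per-frame map/unmap or explicit flush is required (see also the
// note in _sg_vk_shared_buffer_before_submit()).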
_SOKOL_PRIVATE void _sg_vk_shared_buffer_discard(_sg_vk_shared_buffer_t* shbuf) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(shbuf);
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        SOKOL_ASSERT(shbuf->slots[i].buf);
        SOKOL_ASSERT(shbuf->slots[i].mem);
        SOKOL_ASSERT(shbuf->slots[i].mem_ptr);
        vkUnmapMemory(_sg.vk.dev, shbuf->slots[i].mem);
        shbuf->slots[i].mem_ptr = 0;
        _sg_vk_mem_free_device_memory(shbuf->slots[i].mem);
        shbuf->slots[i].mem = 0;
        vkDestroyBuffer(_sg.vk.dev, shbuf->slots[i].buf, 0);
        shbuf->slots[i].buf = 0;
        shbuf->slots[i].dev_addr = 0;
    }
    shbuf->size = 0;
    shbuf->offset = 0;
    shbuf->cur_buf = 0;
    shbuf->cur_dev_addr = 0;
    shbuf->overflown = false;
}
_SOKOL_PRIVATE void _sg_vk_shared_buffer_after_acquire(_sg_vk_shared_buffer_t* shbuf) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(0 == shbuf->cur_buf);
    SOKOL_ASSERT(0 == shbuf->cur_mem_ptr);
    SOKOL_ASSERT(0 == shbuf->cur_dev_addr);
    const uint32_t frame_slot = _sg.vk.frame_slot;
    shbuf->offset = 0;
    shbuf->cur_buf = shbuf->slots[frame_slot].buf;
    shbuf->cur_mem_ptr = shbuf->slots[frame_slot].mem_ptr;
    shbuf->cur_dev_addr = shbuf->slots[frame_slot].dev_addr; // NOTE: may be 0
    shbuf->overflown = false;
    SOKOL_ASSERT(shbuf->cur_buf);
    SOKOL_ASSERT(shbuf->cur_mem_ptr);
}
_SOKOL_PRIVATE void _sg_vk_shared_buffer_before_submit(_sg_vk_shared_buffer_t* shbuf) {
    SOKOL_ASSERT(shbuf->cur_buf);
    SOKOL_ASSERT(shbuf->cur_mem_ptr);
    // NOTE: if the buffer memory weren't host-coherent, this would be the place to do a flush
    shbuf->cur_buf = 0;
    shbuf->cur_mem_ptr = 0;
    shbuf->cur_dev_addr = 0;
}
_SOKOL_PRIVATE VkDeviceSize _sg_vk_shared_buffer_alloc(_sg_vk_shared_buffer_t* shbuf, uint32_t num_bytes) {
    SOKOL_ASSERT(shbuf && (num_bytes > 0));
    if (shbuf->overflown) {
        return _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT;
    }
    if ((shbuf->offset + num_bytes) > shbuf->size) {
        shbuf->overflown = true;
        return _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT;
    }
    SOKOL_ASSERT((shbuf->offset & (shbuf->align - 1)) == 0);
    VkDeviceSize offset = shbuf->offset;
    shbuf->offset = _sg_roundup_u32(shbuf->offset + num_bytes, shbuf->align);
    return offset;
}
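// Worked example for _sg_vk_shared_buffer_alloc(): with align = 256 and a
// fresh frame (offset = 0), allocating 24 bytes returns offset 0 and bumps
// the internal offset to _sg_roundup_u32(0 + 24, 256) = 256; a following
// 300-byte allocation returns 256 and bumps the offset to 768. Returned
// offsets therefore always satisfy the alignment invariant asserted above,
// and once the buffer overflows, all further allocations in the same frame
// fail fast via the 'overflown' flag until the next after-acquire reset.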
_SOKOL_PRIVATE uint8_t* _sg_vk_shared_buffer_ptr(_sg_vk_shared_buffer_t* shbuf, VkDeviceSize offset) {
    SOKOL_ASSERT(shbuf && shbuf->cur_mem_ptr);
    SOKOL_ASSERT(!shbuf->overflown);
    SOKOL_ASSERT(offset < shbuf->size);
    return ((uint8_t*)shbuf->cur_mem_ptr) + offset;
}
_SOKOL_PRIVATE VkDeviceSize _sg_vk_shared_buffer_memcpy(_sg_vk_shared_buffer_t* shbuf, const void* src_ptr, uint32_t num_bytes) {
    SOKOL_ASSERT(shbuf && src_ptr && (num_bytes > 0));
    const VkDeviceSize offset = _sg_vk_shared_buffer_alloc(shbuf, num_bytes);
    if (offset != _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT) {
        memcpy(_sg_vk_shared_buffer_ptr(shbuf, offset), src_ptr, num_bytes);
    }
    return offset;
}
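// The per-frame usage pattern of a shared buffer then looks like this
// (sketch; the after-acquire/before-submit wrappers for the concrete buffers
// are defined further below):
//
//     _sg_vk_shared_buffer_after_acquire(shbuf);  // rewind offset, select frame slot
//     // ...any number of times per frame:
//     VkDeviceSize offs = _sg_vk_shared_buffer_memcpy(shbuf, ptr, num_bytes);
//     if (offs != _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT) {
//         // ...record offs into GPU commands...
//     }
//     _sg_vk_shared_buffer_before_submit(shbuf);  // clear current-frame pointers
//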
// staging system for blocking immutable and dynamic updates, can deal with arbitrarily sized data
_SOKOL_PRIVATE void _sg_vk_staging_copy_init(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    VkResult res;
    SOKOL_ASSERT(0 == _sg.vk.stage.copy.cmd_pool);
    SOKOL_ASSERT(0 == _sg.vk.stage.copy.cmd_buf);
    SOKOL_ASSERT(0 == _sg.vk.stage.copy.size);
    SOKOL_ASSERT(0 == _sg.vk.stage.copy.buf);
    SOKOL_ASSERT(0 == _sg.vk.stage.copy.mem);
    SOKOL_ASSERT(_sg.desc.vulkan.copy_staging_buffer_size > 0);
    _SG_STRUCT(VkCommandPoolCreateInfo, pool_create_info);
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT | VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
    pool_create_info.queueFamilyIndex = _sg.vk.queue_family_index;
    res = vkCreateCommandPool(_sg.vk.dev, &pool_create_info, 0, &_sg.vk.stage.copy.cmd_pool);
    SOKOL_ASSERT((res == VK_SUCCESS) && _sg.vk.stage.copy.cmd_pool);
    _sg_vk_set_object_label(VK_OBJECT_TYPE_COMMAND_POOL, (uint64_t)_sg.vk.stage.copy.cmd_pool, "copy-staging cmd pool");
    _SG_STRUCT(VkCommandBufferAllocateInfo, cmdbuf_alloc_info);
    cmdbuf_alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
    cmdbuf_alloc_info.commandPool = _sg.vk.stage.copy.cmd_pool;
    cmdbuf_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
    cmdbuf_alloc_info.commandBufferCount = 1;
    res = vkAllocateCommandBuffers(_sg.vk.dev, &cmdbuf_alloc_info, &_sg.vk.stage.copy.cmd_buf);
    SOKOL_ASSERT((res == VK_SUCCESS) && _sg.vk.stage.copy.cmd_buf);
    _sg_vk_set_object_label(VK_OBJECT_TYPE_COMMAND_BUFFER, (uint64_t)_sg.vk.stage.copy.cmd_buf, "copy-staging cmd buffer");
    _sg.vk.stage.copy.size = (uint32_t)_sg.desc.vulkan.copy_staging_buffer_size;
    _SG_STRUCT(VkBufferCreateInfo, buf_create_info);
    buf_create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    buf_create_info.size = _sg.vk.stage.copy.size;
    buf_create_info.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    buf_create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    res = vkCreateBuffer(_sg.vk.dev, &buf_create_info, 0, &_sg.vk.stage.copy.buf);
    if (res != VK_SUCCESS) {
        _SG_PANIC(VULKAN_STAGING_CREATE_BUFFER_FAILED);
    }
    SOKOL_ASSERT(_sg.vk.stage.copy.buf);
    _sg_vk_set_object_label(VK_OBJECT_TYPE_BUFFER, (uint64_t)_sg.vk.stage.copy.buf, "copy-staging staging buffer");
    _SG_STRUCT(VkMemoryRequirements, mem_reqs);
    vkGetBufferMemoryRequirements(_sg.vk.dev, _sg.vk.stage.copy.buf, &mem_reqs);
    _sg.vk.stage.copy.mem = _sg_vk_mem_alloc_device_memory(_SG_VK_MEMTYPE_STAGING_COPY, &mem_reqs);
    if (0 == _sg.vk.stage.copy.mem) {
        _SG_PANIC(VULKAN_STAGING_ALLOCATE_MEMORY_FAILED);
    }
    res = vkBindBufferMemory(_sg.vk.dev, _sg.vk.stage.copy.buf, _sg.vk.stage.copy.mem, 0);
    if (res != VK_SUCCESS) {
        _SG_PANIC(VULKAN_STAGING_BIND_BUFFER_MEMORY_FAILED);
    }
}
_SOKOL_PRIVATE void _sg_vk_staging_copy_discard(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(_sg.vk.stage.copy.cmd_pool);
    SOKOL_ASSERT(_sg.vk.stage.copy.cmd_buf);
    SOKOL_ASSERT(_sg.vk.stage.copy.size);
    SOKOL_ASSERT(_sg.vk.stage.copy.buf);
    SOKOL_ASSERT(_sg.vk.stage.copy.mem);
    _sg_vk_mem_free_device_memory(_sg.vk.stage.copy.mem);
    _sg.vk.stage.copy.mem = 0;
    vkDestroyBuffer(_sg.vk.dev, _sg.vk.stage.copy.buf, 0);
    _sg.vk.stage.copy.buf = 0;
    vkDestroyCommandPool(_sg.vk.dev, _sg.vk.stage.copy.cmd_pool, 0);
    _sg.vk.stage.copy.cmd_pool = 0;
    _sg.vk.stage.copy.cmd_buf = 0;
    _sg.vk.stage.copy.size = 0;
}
_SOKOL_PRIVATE VkCommandBuffer _sg_vk_staging_copy_begin(void) {
    VkCommandBuffer cmd_buf = _sg.vk.stage.copy.cmd_buf;
    _SG_STRUCT(VkCommandBufferBeginInfo, cmdbuf_begin_info);
    cmdbuf_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdbuf_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    VkResult res = vkBeginCommandBuffer(cmd_buf, &cmdbuf_begin_info);
    SOKOL_ASSERT(res == VK_SUCCESS); _SOKOL_UNUSED(res);
    return cmd_buf;
}
_SOKOL_PRIVATE void _sg_vk_staging_copy_end(VkCommandBuffer cmd_buf, VkQueue queue) {
    SOKOL_ASSERT(cmd_buf && queue);
    VkResult res;
    _SOKOL_UNUSED(res);
    res = vkEndCommandBuffer(cmd_buf);
    SOKOL_ASSERT(res == VK_SUCCESS);
    _SG_STRUCT(VkSubmitInfo, submit_info);
    submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_info.commandBufferCount = 1;
    submit_info.pCommandBuffers = &cmd_buf;
    res = vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
    SOKOL_ASSERT(res == VK_SUCCESS);
    res = vkQueueWaitIdle(queue);
    SOKOL_ASSERT(res == VK_SUCCESS);
    res = vkResetCommandBuffer(cmd_buf, 0);
    SOKOL_ASSERT(res == VK_SUCCESS);
}
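// NOTE: _sg_vk_staging_copy_begin()/_sg_vk_staging_copy_end() implement a
// fully blocking copy path: the command buffer is submitted immediately and
// vkQueueWaitIdle() stalls until the copy has finished, which is why a single
// reusable staging buffer and command buffer are sufficient. This path is
// only used for the blocking immutable/dynamic updates mentioned above,
// never inside the per-frame command stream.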
_SOKOL_PRIVATE void _sg_vk_staging_map_memcpy_unmap(VkDeviceMemory mem, const void* ptr, uint32_t num_bytes) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(mem);
    SOKOL_ASSERT(ptr);
    SOKOL_ASSERT(num_bytes > 0);
    void* dst_ptr = 0;
    VkResult res = vkMapMemory(_sg.vk.dev, mem, 0, VK_WHOLE_SIZE, 0, &dst_ptr);
    SOKOL_ASSERT((res == VK_SUCCESS) && dst_ptr); _SOKOL_UNUSED(res);
    memcpy(dst_ptr, ptr, num_bytes);
    vkUnmapMemory(_sg.vk.dev, mem);
}
_SOKOL_PRIVATE void _sg_vk_staging_copy_buffer_data(_sg_buffer_t* buf, const sg_range* src_data, size_t dst_offset, bool initial_wait) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(_sg.vk.queue);
    SOKOL_ASSERT(_sg.vk.stage.copy.mem);
    SOKOL_ASSERT(_sg.vk.stage.copy.buf);
    SOKOL_ASSERT(buf && buf->vk.buf);
    SOKOL_ASSERT(src_data && src_data->ptr && (src_data->size > 0));
    SOKOL_ASSERT((dst_offset + src_data->size) <= (size_t)buf->cmn.size);
    // an initial wait is only needed when updating existing resources, not when populating a new resource
    if (initial_wait) {
        VkResult res = vkQueueWaitIdle(_sg.vk.queue);
        SOKOL_ASSERT(res == VK_SUCCESS); _SOKOL_UNUSED(res);
    }
    VkDeviceMemory dst_mem = _sg.vk.stage.copy.mem;
    VkBuffer src_buf = _sg.vk.stage.copy.buf;
    VkBuffer dst_buf = buf->vk.buf;
    const uint8_t* src_ptr = (const uint8_t*)src_data->ptr;
    uint32_t dst_size = _sg.vk.stage.copy.size;
    uint32_t bytes_remaining = (uint32_t)src_data->size;
    _SG_STRUCT(VkBufferCopy, region);
    region.dstOffset = dst_offset;
    while (bytes_remaining > 0) {
        uint32_t bytes_to_copy;
        if (bytes_remaining > dst_size) {
            bytes_to_copy = dst_size;
            bytes_remaining -= dst_size;
        } else {
            bytes_to_copy = bytes_remaining;
            bytes_remaining = 0;
        }
        region.size = bytes_to_copy;
        _sg_vk_staging_map_memcpy_unmap(dst_mem, src_ptr, bytes_to_copy);
        VkCommandBuffer cmd_buf = _sg_vk_staging_copy_begin();
        vkCmdCopyBuffer(cmd_buf, src_buf, dst_buf, 1, &region);
        _sg_stats_inc(vk.num_cmd_copy_buffer);
        _sg_vk_staging_copy_end(cmd_buf, _sg.vk.queue);
        src_ptr += bytes_to_copy;
        region.dstOffset += bytes_to_copy;
    }
    buf->vk.cur_access = _SG_VK_ACCESS_VERTEXBUFFER | _SG_VK_ACCESS_INDEXBUFFER | _SG_VK_ACCESS_STORAGEBUFFER_RO;
}
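// NOTE: _sg_vk_staging_copy_buffer_data() handles source data of arbitrary
// size by splitting it into staging-buffer-sized chunks, each flushed with
// its own blocking submit. E.g. with a 1 MiB staging buffer, a 2.5 MiB update
// results in three map/copy/submit round trips of 1 MiB, 1 MiB and 0.5 MiB.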
_SOKOL_PRIVATE void _sg_vk_init_vk_image_staging_structs(const _sg_image_t* img, VkBuffer vk_buf, VkBufferImageCopy2* region, VkCopyBufferToImageInfo2* copy_info) {
    SOKOL_ASSERT(img && region && copy_info);
    region->sType = VK_STRUCTURE_TYPE_BUFFER_IMAGE_COPY_2;
    if (_sg_is_depth_or_depth_stencil_format(img->cmn.pixel_format)) {
        region->imageSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        if (_sg_is_depth_stencil_format(img->cmn.pixel_format)) {
            region->imageSubresource.aspectMask = VK_IMAGE_ASPECT_STENCIL_BIT;
        }
    } else {
        region->imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    }
    region->imageSubresource.layerCount = 1;
    region->imageExtent.depth = 1;
    copy_info->sType = VK_STRUCTURE_TYPE_COPY_BUFFER_TO_IMAGE_INFO_2;
    copy_info->srcBuffer = vk_buf;
    copy_info->dstImage = img->vk.img;
    copy_info->dstImageLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    copy_info->regionCount = 1;
    copy_info->pRegions = region;
}
_SOKOL_PRIVATE void _sg_vk_staging_copy_image_data(_sg_image_t* img, const sg_image_data* src_data, bool initial_wait) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(_sg.vk.queue);
    SOKOL_ASSERT(_sg.vk.stage.copy.mem);
    SOKOL_ASSERT(_sg.vk.stage.copy.buf);
    SOKOL_ASSERT(img && img->vk.img);
    const uint32_t block_dim = (uint32_t)_sg_block_dim(img->cmn.pixel_format);
    // an initial wait is only needed when updating existing resources, not when populating a new resource
    if (initial_wait) {
        VkResult res = vkQueueWaitIdle(_sg.vk.queue);
        SOKOL_ASSERT(res == VK_SUCCESS); _SOKOL_UNUSED(res);
    }
    VkDeviceMemory mem = _sg.vk.stage.copy.mem;
    _SG_STRUCT(VkBufferImageCopy2, region);
    _SG_STRUCT(VkCopyBufferToImageInfo2, copy_info);
    _sg_vk_init_vk_image_staging_structs(img, _sg.vk.stage.copy.buf, &region, &copy_info);
    for (int mip_index = 0; mip_index < img->cmn.num_mipmaps; mip_index++) {
        const uint8_t* src_ptr = (uint8_t*)src_data->mip_levels[mip_index].ptr;
        int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
        int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
        int mip_slices = (img->cmn.type == SG_IMAGETYPE_3D) ? _sg_miplevel_dim(img->cmn.num_slices, mip_index) : img->cmn.num_slices;
        const uint32_t row_pitch = (uint32_t)_sg_row_pitch(img->cmn.pixel_format, mip_width, 1);
        const uint32_t num_rows = (uint32_t)_sg_num_rows(img->cmn.pixel_format, mip_height);
        region.imageSubresource.mipLevel = (uint32_t)mip_index;
        region.imageExtent.width = (uint32_t)mip_width;
        const uint32_t max_rows = _sg.vk.stage.copy.size / row_pitch;
        for (int slice_index = 0; slice_index < mip_slices; slice_index++) {
            if (img->cmn.type == SG_IMAGETYPE_3D) {
                region.imageOffset.z = slice_index;
            } else {
                region.imageSubresource.baseArrayLayer = (uint32_t)slice_index;
            }
            uint32_t rows_remaining = num_rows;
            uint32_t cur_row = 0;
            while (rows_remaining > 0) {
                uint32_t rows_to_copy;
                if (rows_remaining > max_rows) {
                    rows_to_copy = max_rows;
                    rows_remaining -= max_rows;
                } else {
                    rows_to_copy = rows_remaining;
                    rows_remaining = 0;
                }
                const uint32_t bytes_to_copy = rows_to_copy * row_pitch;
                SOKOL_ASSERT(bytes_to_copy <= _sg.vk.stage.copy.size);
                _sg_vk_staging_map_memcpy_unmap(mem, src_ptr, bytes_to_copy);
                src_ptr += bytes_to_copy;
                VkCommandBuffer cmd_buf = _sg_vk_staging_copy_begin();
                _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_STAGING);
                region.imageOffset.y = (int32_t)(cur_row * block_dim);
                region.imageExtent.height = _sg_min((uint32_t)mip_height, rows_to_copy * block_dim);
                vkCmdCopyBufferToImage2(cmd_buf, &copy_info);
                _sg_stats_inc(vk.num_cmd_copy_buffer_to_image);
                _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_TEXTURE);
                _sg_vk_staging_copy_end(cmd_buf, _sg.vk.queue);
                cur_row += rows_to_copy;
            }
        }
    }
}
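// NOTE: image data is staged per mip level and slice, with each slice further
// split into chunks of whole rows (whole block rows for compressed formats,
// hence the block_dim scaling of imageOffset.y and imageExtent.height), so a
// single chunk never exceeds the staging buffer size.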
// staging system for non-blocking streaming updates with a max per-frame data limit
_SOKOL_PRIVATE void _sg_vk_staging_stream_init(void) {
    SOKOL_ASSERT(_sg.desc.vulkan.stream_staging_buffer_size > 0);
    _sg_vk_shared_buffer_init(&_sg.vk.stage.stream,
        (uint32_t)_sg.desc.vulkan.stream_staging_buffer_size,
        16, // NOTE: arbitrary alignment (FIXME?)
        _SG_VK_MEMTYPE_STAGING_STREAM,
        "shared-stream-buffer");
}
_SOKOL_PRIVATE void _sg_vk_staging_stream_discard(void) {
    _sg_vk_shared_buffer_discard(&_sg.vk.stage.stream);
}
_SOKOL_PRIVATE void _sg_vk_staging_stream_after_acquire(void) {
    _sg_vk_shared_buffer_after_acquire(&_sg.vk.stage.stream);
}
_SOKOL_PRIVATE void _sg_vk_staging_stream_before_submit(void) {
    _sg_vk_shared_buffer_before_submit(&_sg.vk.stage.stream);
}
_SOKOL_PRIVATE void _sg_vk_staging_stream_buffer_data(_sg_buffer_t* buf, const sg_range* src_data, size_t dst_offset) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(_sg.vk.frame.stream_cmd_buf);
    SOKOL_ASSERT(_sg.vk.stage.stream.cur_buf);
    SOKOL_ASSERT(buf && buf->vk.buf);
    SOKOL_ASSERT(src_data && src_data->ptr && (src_data->size > 0));
    SOKOL_ASSERT((src_data->size + dst_offset) <= (size_t)buf->cmn.size);
    const VkDeviceSize src_offset = _sg_vk_shared_buffer_memcpy(&_sg.vk.stage.stream, src_data->ptr, (uint32_t)src_data->size);
    if (src_offset == _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT) {
        _SG_ERROR(VULKAN_STAGING_STREAM_BUFFER_OVERFLOW);
        return;
    }
    VkCommandBuffer cmd_buf = _sg.vk.frame.stream_cmd_buf;
    VkBuffer vk_src_buf = _sg.vk.stage.stream.cur_buf;
    VkBuffer vk_dst_buf = buf->vk.buf;
    _SG_STRUCT(VkBufferCopy, region);
    region.srcOffset = src_offset;
    region.dstOffset = dst_offset;
    region.size = src_data->size;
    _sg_vk_buffer_barrier(cmd_buf, buf, _SG_VK_ACCESS_STAGING);
    vkCmdCopyBuffer(cmd_buf, vk_src_buf, vk_dst_buf, 1, &region);
    _sg_stats_inc(vk.num_cmd_copy_buffer);
    // FIXME: not great to issue a barrier right here,
    // rethink buffer barrier strategy? => a single memory barrier
    // at the end of the stream command buffer should be sufficient?
    _sg_vk_buffer_barrier(cmd_buf, buf, _SG_VK_ACCESS_VERTEXBUFFER|_SG_VK_ACCESS_INDEXBUFFER|_SG_VK_ACCESS_STORAGEBUFFER_RO);
}
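// NOTE: unlike the blocking copy-staging path above, the stream path records
// its copy commands into the per-frame stream command buffer and sources the
// data from the double-buffered shared stream buffer, so it never stalls the
// CPU. The tradeoff is a fixed per-frame data budget
// (sg_desc.vulkan.stream_staging_buffer_size); on overflow an error is logged
// and the update is dropped.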
_SOKOL_PRIVATE void _sg_vk_staging_stream_image_data(_sg_image_t* img, const sg_image_data* src_data) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(_sg.vk.frame.stream_cmd_buf);
    SOKOL_ASSERT(img && img->vk.img);
    SOKOL_ASSERT(src_data);
    VkCommandBuffer cmd_buf = _sg.vk.frame.stream_cmd_buf;
    _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_STAGING);
    _SG_STRUCT(VkBufferImageCopy2, region);
    _SG_STRUCT(VkCopyBufferToImageInfo2, copy_info);
    _sg_vk_init_vk_image_staging_structs(img, _sg.vk.stage.stream.cur_buf, &region, &copy_info);
    for (int mip_index = 0; mip_index < img->cmn.num_mipmaps; mip_index++) {
        const sg_range* src_mip = &src_data->mip_levels[mip_index];
        SOKOL_ASSERT(src_mip->ptr);
        SOKOL_ASSERT(src_mip->size > 0);
        const VkDeviceSize src_offset = _sg_vk_shared_buffer_memcpy(&_sg.vk.stage.stream, src_mip->ptr, (uint32_t)src_mip->size);
        if (src_offset == _SG_VK_SHARED_BUFFER_OVERFLOW_RESULT) {
            _SG_ERROR(VULKAN_STAGING_STREAM_BUFFER_OVERFLOW);
            _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_TEXTURE);
            return;
        }
        region.bufferOffset = src_offset;
        int mip_width = _sg_miplevel_dim(img->cmn.width, mip_index);
        int mip_height = _sg_miplevel_dim(img->cmn.height, mip_index);
        int mip_slices = (img->cmn.type == SG_IMAGETYPE_3D) ? _sg_miplevel_dim(img->cmn.num_slices, mip_index) : img->cmn.num_slices;
        region.imageExtent.width = (uint32_t)mip_width;
        region.imageExtent.height = (uint32_t)mip_height;
        region.imageSubresource.mipLevel = (uint32_t)mip_index;
        if (img->cmn.type == SG_IMAGETYPE_3D) {
            region.imageExtent.depth = (uint32_t)mip_slices;
            region.imageSubresource.layerCount = 1;
        } else {
            region.imageExtent.depth = 1;
            region.imageSubresource.layerCount = (uint32_t)mip_slices;
        }
        vkCmdCopyBufferToImage2(cmd_buf, &copy_info);
        _sg_stats_inc(vk.num_cmd_copy_buffer_to_image);
    }
    _sg_vk_image_barrier(cmd_buf, img, _SG_VK_ACCESS_TEXTURE);
}
// uniform data system
_SOKOL_PRIVATE void _sg_vk_uniform_init(void) {
    SOKOL_ASSERT(_sg.desc.uniform_buffer_size > 0);
    _sg_vk_shared_buffer_init(&_sg.vk.uniform,
        (uint32_t)_sg.desc.uniform_buffer_size,
        (uint32_t)_sg.vk.dev_props.properties.limits.minUniformBufferOffsetAlignment,
        _SG_VK_MEMTYPE_UNIFORMS,
        "shared-uniform-buffer");
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        _sg_vk_uniform_bindinfo_t* ubi = &_sg.vk.uniform_bindinfos[i];
        ubi->addr_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT;
        ubi->get_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT;
        ubi->get_info.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        ubi->get_info.data.pUniformBuffer = &ubi->addr_info;
    }
}
_SOKOL_PRIVATE void _sg_vk_uniform_discard(void) {
    _sg_vk_shared_buffer_discard(&_sg.vk.uniform);
}
// called from _sg_vk_acquire_frame_command_buffer()
_SOKOL_PRIVATE void _sg_vk_uniform_after_acquire(void) {
    _sg_vk_shared_buffer_after_acquire(&_sg.vk.uniform);
    // reset uniform tracking data
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        _sg_vk_uniform_bindinfo_t* ubi = &_sg.vk.uniform_bindinfos[i];
        ubi->addr_info.address = 0;
        ubi->addr_info.range = 0;
    }
}
// called from _sg_vk_submit_frame_command_buffer()
_SOKOL_PRIVATE void _sg_vk_uniform_before_submit(void) {
    _sg_vk_shared_buffer_before_submit(&_sg.vk.uniform);
}
// called from _sg_vk_apply_uniforms(), returns the offset of the data snippet in the uniform buffer
_SOKOL_PRIVATE uint32_t _sg_vk_uniform_copy(const sg_range* data) {
    SOKOL_ASSERT(data && data->ptr && (data->size > 0));
    return (uint32_t)_sg_vk_shared_buffer_memcpy(&_sg.vk.uniform, data->ptr, (uint32_t)data->size);
}
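// NOTE: the returned offset is aligned to minUniformBufferOffsetAlignment
// (the alignment passed to _sg_vk_shared_buffer_init() in _sg_vk_uniform_init()
// above). E.g. with a 256-byte alignment, three consecutive 64-byte uniform
// updates land at offsets 0, 256 and 512 rather than being tightly packed,
// which must be accounted for when sizing sg_desc.uniform_buffer_size.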
// resource binding system
_SOKOL_PRIVATE void _sg_vk_bind_init(void) {
    SOKOL_ASSERT(_sg.desc.vulkan.descriptor_buffer_size > 0);
    _sg_vk_shared_buffer_init(&_sg.vk.bind,
        (uint32_t)_sg.desc.vulkan.descriptor_buffer_size,
        (uint32_t)_sg.vk.descriptor_buffer_props.descriptorBufferOffsetAlignment,
        _SG_VK_MEMTYPE_DESCRIPTORS,
        "shared-descriptor-buffer");
}
_SOKOL_PRIVATE void _sg_vk_bind_discard(void) {
    _sg_vk_shared_buffer_discard(&_sg.vk.bind);
}
// called from _sg_vk_acquire_frame_command_buffer()
_SOKOL_PRIVATE void _sg_vk_bind_after_acquire(void) {
    _sg_vk_shared_buffer_after_acquire(&_sg.vk.bind);
    // bind the current frame's descriptor buffer
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    SOKOL_ASSERT(_sg.vk.bind.cur_buf);
    SOKOL_ASSERT(_sg.vk.bind.cur_dev_addr);
    _SG_STRUCT(VkDescriptorBufferBindingInfoEXT, bind_info);
    bind_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_BUFFER_BINDING_INFO_EXT;
    bind_info.address = _sg.vk.bind.cur_dev_addr;
    bind_info.usage = VK_BUFFER_USAGE_RESOURCE_DESCRIPTOR_BUFFER_BIT_EXT |
                      VK_BUFFER_USAGE_SAMPLER_DESCRIPTOR_BUFFER_BIT_EXT;
    _sg.vk.ext.cmd_bind_descriptor_buffers(_sg.vk.frame.cmd_buf, 1, &bind_info);
}
// called from _sg_vk_submit_frame_command_buffer()
_SOKOL_PRIVATE void _sg_vk_bind_before_submit(void) {
    _sg_vk_shared_buffer_before_submit(&_sg.vk.bind);
}
_SOKOL_PRIVATE bool _sg_vk_bind_view_smp_descriptor_set(VkCommandBuffer cmd_buf, const _sg_bindings_ptrs_t* bnd, VkPipelineBindPoint vk_bind_point) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(cmd_buf);
    SOKOL_ASSERT(bnd && bnd->pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd->pip->cmn.shader);
    // allocate the next descriptor-set range in the descriptor buffer
    const VkDeviceSize dset_size = shd->vk.view_smp_dset_size;
    if (dset_size == 0) {
        // nothing to bind
        return true;
    }
    const VkDeviceSize dbuf_offset = _sg_vk_shared_buffer_alloc(&_sg.vk.bind, (uint32_t)dset_size);
    if (_sg.vk.bind.overflown) {
        _SG_ERROR(VULKAN_DESCRIPTOR_BUFFER_OVERFLOW);
        return false;
    }
    _sg_stats_add(vk.size_descriptor_buffer_writes, dset_size);
    uint8_t* dbuf_ptr = _sg_vk_shared_buffer_ptr(&_sg.vk.bind, dbuf_offset);
    // copy pre-recorded descriptor data into the descriptor buffer
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const _sg_view_t* view = bnd->views[i];
        SOKOL_ASSERT(view && (view->vk.descriptor_size > 0));
        const void* src_ptr = view->vk.descriptor_data;
        size_t size = view->vk.descriptor_size;
        void* dst_ptr = dbuf_ptr + shd->vk.view_dset_offsets[i];
        memcpy(dst_ptr, src_ptr, size);
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        if (shd->cmn.samplers[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const _sg_sampler_t* smp = bnd->smps[i];
        SOKOL_ASSERT(smp && (smp->vk.descriptor_size > 0));
        const void* src_ptr = smp->vk.descriptor_data;
        size_t size = smp->vk.descriptor_size;
        void* dst_ptr = dbuf_ptr + shd->vk.smp_dset_offsets[i];
        memcpy(dst_ptr, src_ptr, size);
    }
    // record the new descriptor buffer offset
    const uint32_t dbuf_index = 0;
    SOKOL_ASSERT(shd->vk.pip_layout);
    _sg.vk.ext.cmd_set_descriptor_buffer_offsets(
        cmd_buf,
        vk_bind_point,
        shd->vk.pip_layout,
        _SG_VK_VIEW_SMP_DESCRIPTORSET_INDEX, // firstSet
        1, // setCount
        &dbuf_index,
        &dbuf_offset);
    _sg_stats_inc(vk.num_cmd_set_descriptor_buffer_offsets);
    return true;
}
_SOKOL_PRIVATE bool _sg_vk_bind_uniform_descriptor_set(VkCommandBuffer cmd_buf) {
    SOKOL_ASSERT(cmd_buf);
    SOKOL_ASSERT(_sg.vk.uniforms_dirty);
    _sg.vk.uniforms_dirty = false;
    const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    // allocate the next descriptor-set range in the descriptor buffer
    const VkDeviceSize dbuf_offset = _sg_vk_shared_buffer_alloc(&_sg.vk.bind, (uint32_t)shd->vk.ub_dset_size);
    if (_sg.vk.bind.overflown) {
        _SG_ERROR(VULKAN_DESCRIPTOR_BUFFER_OVERFLOW);
        return false;
    }
    _sg_stats_add(vk.size_descriptor_buffer_writes, shd->vk.ub_dset_size);
    uint8_t* dbuf_ptr = _sg_vk_shared_buffer_ptr(&_sg.vk.bind, dbuf_offset);
    // update the descriptor buffer
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        if (shd->cmn.uniform_blocks[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        _sg.vk.ext.get_descriptor(_sg.vk.dev,
            &_sg.vk.uniform_bindinfos[i].get_info,
            _sg.vk.descriptor_buffer_props.uniformBufferDescriptorSize,
            dbuf_ptr + shd->vk.ub_dset_offsets[i]);
    }
    // record the descriptor buffer offset
    const VkPipelineBindPoint vk_bind_point = _sg.cur_pass.is_compute
        ? VK_PIPELINE_BIND_POINT_COMPUTE
        : VK_PIPELINE_BIND_POINT_GRAPHICS;
    const uint32_t dbuf_index = 0;
    SOKOL_ASSERT(shd->vk.pip_layout);
    _sg.vk.ext.cmd_set_descriptor_buffer_offsets(
        cmd_buf,
        vk_bind_point,
        shd->vk.pip_layout,
        _SG_VK_UB_DESCRIPTORSET_INDEX, // firstSet
        1, // setCount
        &dbuf_index,
        &dbuf_offset);
    _sg_stats_inc(vk.num_cmd_set_descriptor_buffer_offsets);
    return true;
}
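// NOTE: both descriptor-set binding functions above follow the same
// VK_EXT_descriptor_buffer pattern: linearly allocate a range from the shared
// descriptor buffer, write the descriptor payloads into it on the CPU
// (pre-recorded blobs for views and samplers, vkGetDescriptorEXT for uniform
// buffers), then point the pipeline layout's set index at the new range via
// vkCmdSetDescriptorBufferOffsetsEXT. No VkDescriptorPool or
// vkUpdateDescriptorSets calls are involved.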
_SOKOL_PRIVATE void _sg_vk_memory_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    _sg_vk_mem_free_device_memory((VkDeviceMemory)obj);
}
_SOKOL_PRIVATE void _sg_vk_buffer_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroyBuffer(_sg.vk.dev, (VkBuffer)obj, 0);
}
_SOKOL_PRIVATE void _sg_vk_image_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroyImage(_sg.vk.dev, (VkImage)obj, 0);
}
_SOKOL_PRIVATE void _sg_vk_image_view_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroyImageView(_sg.vk.dev, (VkImageView)obj, 0);
}
_SOKOL_PRIVATE void _sg_vk_sampler_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroySampler(_sg.vk.dev, (VkSampler)obj, 0);
}
_SOKOL_PRIVATE void _sg_vk_shader_module_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroyShaderModule(_sg.vk.dev, (VkShaderModule)obj, 0);
}
_SOKOL_PRIVATE void _sg_vk_pipelinelayout_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroyPipelineLayout(_sg.vk.dev, (VkPipelineLayout)obj, 0);
}
_SOKOL_PRIVATE void _sg_vk_descriptorsetlayout_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroyDescriptorSetLayout(_sg.vk.dev, (VkDescriptorSetLayout)obj, 0);
}
_SOKOL_PRIVATE void _sg_vk_pipeline_destructor(void* obj) {
    SOKOL_ASSERT(_sg.vk.dev && obj);
    vkDestroyPipeline(_sg.vk.dev, (VkPipeline)obj, 0);
}
_SOKOL_PRIVATE VkBufferUsageFlags _sg_vk_buffer_usage(const sg_buffer_usage* usg) {
    VkBufferUsageFlags res = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    if (usg->vertex_buffer) {
        res |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    }
    if (usg->index_buffer) {
        res |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    }
    if (usg->storage_buffer) {
        res |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT | VK_BUFFER_USAGE_SHADER_DEVICE_ADDRESS_BIT;
    }
    return res;
}
_SOKOL_PRIVATE VkVertexInputRate _sg_vk_vertex_input_rate(sg_vertex_step s) {
    return (s == SG_VERTEXSTEP_PER_VERTEX) ? VK_VERTEX_INPUT_RATE_VERTEX : VK_VERTEX_INPUT_RATE_INSTANCE;
}
_SOKOL_PRIVATE VkFormat _sg_vk_vertex_format(sg_vertex_format f) {
    switch (f) {
        case SG_VERTEXFORMAT_FLOAT: return VK_FORMAT_R32_SFLOAT;
        case SG_VERTEXFORMAT_FLOAT2: return VK_FORMAT_R32G32_SFLOAT;
        case SG_VERTEXFORMAT_FLOAT3: return VK_FORMAT_R32G32B32_SFLOAT;
        case SG_VERTEXFORMAT_FLOAT4: return VK_FORMAT_R32G32B32A32_SFLOAT;
        case SG_VERTEXFORMAT_INT: return VK_FORMAT_R32_SINT;
        case SG_VERTEXFORMAT_INT2: return VK_FORMAT_R32G32_SINT;
        case SG_VERTEXFORMAT_INT3: return VK_FORMAT_R32G32B32_SINT;
        case SG_VERTEXFORMAT_INT4: return VK_FORMAT_R32G32B32A32_SINT;
        case SG_VERTEXFORMAT_UINT: return VK_FORMAT_R32_UINT;
        case SG_VERTEXFORMAT_UINT2: return VK_FORMAT_R32G32_UINT;
        case SG_VERTEXFORMAT_UINT3: return VK_FORMAT_R32G32B32_UINT;
        case SG_VERTEXFORMAT_UINT4: return VK_FORMAT_R32G32B32A32_UINT;
        case SG_VERTEXFORMAT_BYTE4: return VK_FORMAT_R8G8B8A8_SINT;
        case SG_VERTEXFORMAT_BYTE4N: return VK_FORMAT_R8G8B8A8_SNORM;
        case SG_VERTEXFORMAT_UBYTE4: return VK_FORMAT_R8G8B8A8_UINT;
        case SG_VERTEXFORMAT_UBYTE4N: return VK_FORMAT_R8G8B8A8_UNORM;
        case SG_VERTEXFORMAT_SHORT2: return VK_FORMAT_R16G16_SINT;
        case SG_VERTEXFORMAT_SHORT2N: return VK_FORMAT_R16G16_SNORM;
        case SG_VERTEXFORMAT_USHORT2: return VK_FORMAT_R16G16_UINT;
        case SG_VERTEXFORMAT_USHORT2N: return VK_FORMAT_R16G16_UNORM;
        case SG_VERTEXFORMAT_SHORT4: return VK_FORMAT_R16G16B16A16_SINT;
        case SG_VERTEXFORMAT_SHORT4N: return VK_FORMAT_R16G16B16A16_SNORM;
        case SG_VERTEXFORMAT_USHORT4: return VK_FORMAT_R16G16B16A16_UINT;
        case SG_VERTEXFORMAT_USHORT4N: return VK_FORMAT_R16G16B16A16_UNORM;
        case SG_VERTEXFORMAT_UINT10_N2: return VK_FORMAT_A2R10G10B10_UNORM_PACK32;
        case SG_VERTEXFORMAT_HALF2: return VK_FORMAT_R16G16_SFLOAT;
        case SG_VERTEXFORMAT_HALF4: return VK_FORMAT_R16G16B16A16_SFLOAT;
        default:
            SOKOL_UNREACHABLE;
            return VK_FORMAT_UNDEFINED;
    }
}
_SOKOL_PRIVATE VkImageCreateFlags _sg_vk_image_create_flags(sg_image_type t) {
    switch (t) {
        case SG_IMAGETYPE_2D: return 0;
        case SG_IMAGETYPE_CUBE: return VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
        // FIXME: VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT needed for render to slice?
        case SG_IMAGETYPE_3D: return 0;
        case SG_IMAGETYPE_ARRAY: return 0;
        default:
            SOKOL_UNREACHABLE;
            return 0;
    }
}
_SOKOL_PRIVATE VkImageType _sg_vk_image_type(sg_image_type t) {
    return (SG_IMAGETYPE_3D == t) ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D;
}
_SOKOL_PRIVATE VkImageUsageFlags _sg_vk_image_usage(const sg_image_usage* usg) {
    VkImageUsageFlags res = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    res |= VK_IMAGE_USAGE_SAMPLED_BIT;
    if (usg->storage_image) {
        res |= VK_IMAGE_USAGE_STORAGE_BIT;
    }
    if (usg->color_attachment || usg->resolve_attachment) {
        res |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }
    if (usg->depth_stencil_attachment) {
        res |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    }
    return res;
}
_SOKOL_PRIVATE VkFormat _sg_vk_format(sg_pixel_format fmt) {
    switch (fmt) {
        case SG_PIXELFORMAT_NONE: return VK_FORMAT_UNDEFINED;
        case SG_PIXELFORMAT_R8: return VK_FORMAT_R8_UNORM;
        case SG_PIXELFORMAT_R8SN: return VK_FORMAT_R8_SNORM;
        case SG_PIXELFORMAT_R8UI: return VK_FORMAT_R8_UINT;
        case SG_PIXELFORMAT_R8SI: return VK_FORMAT_R8_SINT;
        case SG_PIXELFORMAT_R16: return VK_FORMAT_R16_UNORM;
        case SG_PIXELFORMAT_R16SN: return VK_FORMAT_R16_SNORM;
        case SG_PIXELFORMAT_R16UI: return VK_FORMAT_R16_UINT;
        case SG_PIXELFORMAT_R16SI: return VK_FORMAT_R16_SINT;
        case SG_PIXELFORMAT_R16F: return VK_FORMAT_R16_SFLOAT;
        case SG_PIXELFORMAT_RG8: return VK_FORMAT_R8G8_UNORM;
        case SG_PIXELFORMAT_RG8SN: return VK_FORMAT_R8G8_SNORM;
        case SG_PIXELFORMAT_RG8UI: return VK_FORMAT_R8G8_UINT;
        case SG_PIXELFORMAT_RG8SI: return VK_FORMAT_R8G8_SINT;
        case SG_PIXELFORMAT_R32UI: return VK_FORMAT_R32_UINT;
        case SG_PIXELFORMAT_R32SI: return VK_FORMAT_R32_SINT;
        case SG_PIXELFORMAT_R32F: return VK_FORMAT_R32_SFLOAT;
        case SG_PIXELFORMAT_RG16: return VK_FORMAT_R16G16_UNORM;
        case SG_PIXELFORMAT_RG16SN: return VK_FORMAT_R16G16_SNORM;
        case SG_PIXELFORMAT_RG16UI: return VK_FORMAT_R16G16_UINT;
        case SG_PIXELFORMAT_RG16SI: return VK_FORMAT_R16G16_SINT;
        case SG_PIXELFORMAT_RG16F: return VK_FORMAT_R16G16_SFLOAT;
        case SG_PIXELFORMAT_RGBA8: return VK_FORMAT_R8G8B8A8_UNORM;
        case SG_PIXELFORMAT_SRGB8A8: return VK_FORMAT_R8G8B8A8_SRGB;
        case SG_PIXELFORMAT_RGBA8SN: return VK_FORMAT_R8G8B8A8_SNORM;
        case SG_PIXELFORMAT_RGBA8UI: return VK_FORMAT_R8G8B8A8_UINT;
        case SG_PIXELFORMAT_RGBA8SI: return VK_FORMAT_R8G8B8A8_SINT;
        case SG_PIXELFORMAT_BGRA8: return VK_FORMAT_B8G8R8A8_UNORM;
        case SG_PIXELFORMAT_RGB10A2: return VK_FORMAT_A2R10G10B10_UNORM_PACK32;
        case SG_PIXELFORMAT_RG11B10F: return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
        case SG_PIXELFORMAT_RGB9E5: return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
        case SG_PIXELFORMAT_RG32UI: return VK_FORMAT_R32G32_UINT;
        case SG_PIXELFORMAT_RG32SI: return VK_FORMAT_R32G32_SINT;
        case SG_PIXELFORMAT_RG32F: return VK_FORMAT_R32G32_SFLOAT;
        case SG_PIXELFORMAT_RGBA16: return VK_FORMAT_R16G16B16A16_UNORM;
        case SG_PIXELFORMAT_RGBA16SN: return VK_FORMAT_R16G16B16A16_SNORM;
        case SG_PIXELFORMAT_RGBA16UI: return VK_FORMAT_R16G16B16A16_UINT;
        case SG_PIXELFORMAT_RGBA16SI: return VK_FORMAT_R16G16B16A16_SINT;
        case SG_PIXELFORMAT_RGBA16F: return VK_FORMAT_R16G16B16A16_SFLOAT;
        case SG_PIXELFORMAT_RGBA32UI: return VK_FORMAT_R32G32B32A32_UINT;
        case SG_PIXELFORMAT_RGBA32SI: return VK_FORMAT_R32G32B32A32_SINT;
        case SG_PIXELFORMAT_RGBA32F: return VK_FORMAT_R32G32B32A32_SFLOAT;
        case SG_PIXELFORMAT_DEPTH: return VK_FORMAT_D32_SFLOAT;
        case SG_PIXELFORMAT_DEPTH_STENCIL: return VK_FORMAT_D32_SFLOAT_S8_UINT;
        case SG_PIXELFORMAT_BC1_RGBA: return VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
        case SG_PIXELFORMAT_BC2_RGBA: return VK_FORMAT_BC2_UNORM_BLOCK;
        case SG_PIXELFORMAT_BC3_RGBA: return VK_FORMAT_BC3_UNORM_BLOCK;
        case SG_PIXELFORMAT_BC3_SRGBA: return VK_FORMAT_BC3_SRGB_BLOCK;
        case SG_PIXELFORMAT_BC4_R: return VK_FORMAT_BC4_UNORM_BLOCK;
        case SG_PIXELFORMAT_BC4_RSN: return VK_FORMAT_BC4_SNORM_BLOCK;
        case SG_PIXELFORMAT_BC5_RG: return VK_FORMAT_BC5_UNORM_BLOCK;
        case SG_PIXELFORMAT_BC5_RGSN: return VK_FORMAT_BC5_SNORM_BLOCK;
        case SG_PIXELFORMAT_BC6H_RGBF: return VK_FORMAT_BC6H_SFLOAT_BLOCK;
        case SG_PIXELFORMAT_BC6H_RGBUF: return VK_FORMAT_BC6H_UFLOAT_BLOCK;
        case SG_PIXELFORMAT_BC7_RGBA: return VK_FORMAT_BC7_UNORM_BLOCK;
        case SG_PIXELFORMAT_BC7_SRGBA: return VK_FORMAT_BC7_SRGB_BLOCK;
        case SG_PIXELFORMAT_ETC2_RGB8: return VK_FORMAT_ETC2_R8G8B8_UNORM_BLOCK;
        case SG_PIXELFORMAT_ETC2_RGB8A1: return VK_FORMAT_ETC2_R8G8B8A1_UNORM_BLOCK;
        case SG_PIXELFORMAT_ETC2_RGBA8: return VK_FORMAT_ETC2_R8G8B8A8_UNORM_BLOCK;
        case SG_PIXELFORMAT_ETC2_SRGB8: return VK_FORMAT_ETC2_R8G8B8_SRGB_BLOCK;
        case SG_PIXELFORMAT_ETC2_SRGB8A8: return VK_FORMAT_ETC2_R8G8B8A8_SRGB_BLOCK;
        case SG_PIXELFORMAT_EAC_R11: return VK_FORMAT_EAC_R11_UNORM_BLOCK;
        case SG_PIXELFORMAT_EAC_R11SN: return VK_FORMAT_EAC_R11_SNORM_BLOCK;
        case SG_PIXELFORMAT_EAC_RG11: return VK_FORMAT_EAC_R11G11_UNORM_BLOCK;
        case SG_PIXELFORMAT_EAC_RG11SN: return VK_FORMAT_EAC_R11G11_SNORM_BLOCK;
        case SG_PIXELFORMAT_ASTC_4x4_RGBA: return VK_FORMAT_ASTC_4x4_UNORM_BLOCK;
        case SG_PIXELFORMAT_ASTC_4x4_SRGBA: return VK_FORMAT_ASTC_4x4_SRGB_BLOCK;
        default: return VK_FORMAT_UNDEFINED;
    }
}
_SOKOL_PRIVATE VkPrimitiveTopology _sg_vk_primitive_topology(sg_primitive_type t) {
    switch (t) {
        case SG_PRIMITIVETYPE_POINTS: return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        case SG_PRIMITIVETYPE_LINES: return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
        case SG_PRIMITIVETYPE_LINE_STRIP: return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case SG_PRIMITIVETYPE_TRIANGLES: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case SG_PRIMITIVETYPE_TRIANGLE_STRIP: return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        default:
            SOKOL_UNREACHABLE;
            return VK_PRIMITIVE_TOPOLOGY_MAX_ENUM;
    }
}
_SOKOL_PRIVATE VkCullModeFlags _sg_vk_cullmode(sg_cull_mode cm) {
    switch (cm) {
        case SG_CULLMODE_NONE: return VK_CULL_MODE_NONE;
        case SG_CULLMODE_FRONT: return VK_CULL_MODE_FRONT_BIT;
        case SG_CULLMODE_BACK: return VK_CULL_MODE_BACK_BIT;
        default:
            SOKOL_UNREACHABLE;
            return VK_CULL_MODE_NONE;
    }
}
_SOKOL_PRIVATE VkFrontFace _sg_vk_frontface(sg_face_winding fw) {
    return (fw == SG_FACEWINDING_CCW) ? VK_FRONT_FACE_COUNTER_CLOCKWISE : VK_FRONT_FACE_CLOCKWISE;
}
_SOKOL_PRIVATE VkCompareOp _sg_vk_compare_op(sg_compare_func f) {
    switch (f) {
        case SG_COMPAREFUNC_NEVER: return VK_COMPARE_OP_NEVER;
        case SG_COMPAREFUNC_LESS: return VK_COMPARE_OP_LESS;
        case SG_COMPAREFUNC_EQUAL: return VK_COMPARE_OP_EQUAL;
        case SG_COMPAREFUNC_LESS_EQUAL: return VK_COMPARE_OP_LESS_OR_EQUAL;
        case SG_COMPAREFUNC_GREATER: return VK_COMPARE_OP_GREATER;
        case SG_COMPAREFUNC_NOT_EQUAL: return VK_COMPARE_OP_NOT_EQUAL;
        case SG_COMPAREFUNC_GREATER_EQUAL: return VK_COMPARE_OP_GREATER_OR_EQUAL;
        case SG_COMPAREFUNC_ALWAYS: return VK_COMPARE_OP_ALWAYS;
        default:
            SOKOL_UNREACHABLE;
            return VK_COMPARE_OP_ALWAYS;
    }
}
_SOKOL_PRIVATE VkStencilOp _sg_vk_stencil_op(sg_stencil_op op) {
    switch (op) {
        case SG_STENCILOP_KEEP: return VK_STENCIL_OP_KEEP;
        case SG_STENCILOP_ZERO: return VK_STENCIL_OP_ZERO;
        case SG_STENCILOP_REPLACE: return VK_STENCIL_OP_REPLACE;
        case SG_STENCILOP_INCR_CLAMP: return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
        case SG_STENCILOP_DECR_CLAMP: return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
        case SG_STENCILOP_INVERT: return VK_STENCIL_OP_INVERT;
        case SG_STENCILOP_INCR_WRAP: return VK_STENCIL_OP_INCREMENT_AND_WRAP;
        case SG_STENCILOP_DECR_WRAP: return VK_STENCIL_OP_DECREMENT_AND_WRAP;
        default:
            SOKOL_UNREACHABLE;
            return VK_STENCIL_OP_KEEP;
    }
}
_SOKOL_PRIVATE VkBlendOp _sg_vk_blend_op(sg_blend_op op) {
    switch (op) {
        case SG_BLENDOP_ADD: return VK_BLEND_OP_ADD;
        case SG_BLENDOP_SUBTRACT: return VK_BLEND_OP_SUBTRACT;
        case SG_BLENDOP_REVERSE_SUBTRACT: return VK_BLEND_OP_REVERSE_SUBTRACT;
        case SG_BLENDOP_MIN: return VK_BLEND_OP_MIN;
        case SG_BLENDOP_MAX: return VK_BLEND_OP_MAX;
        default:
            SOKOL_UNREACHABLE;
            return VK_BLEND_OP_ADD;
    }
}
_SOKOL_PRIVATE VkBlendFactor _sg_vk_blend_factor(sg_blend_factor f) {
    switch (f) {
        case SG_BLENDFACTOR_ZERO: return VK_BLEND_FACTOR_ZERO;
        case SG_BLENDFACTOR_ONE: return VK_BLEND_FACTOR_ONE;
        case SG_BLENDFACTOR_SRC_COLOR: return VK_BLEND_FACTOR_SRC_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_COLOR: return VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
        case SG_BLENDFACTOR_SRC_ALPHA: return VK_BLEND_FACTOR_SRC_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_SRC_ALPHA: return VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
        case SG_BLENDFACTOR_DST_COLOR: return VK_BLEND_FACTOR_DST_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_DST_COLOR: return VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR;
        case SG_BLENDFACTOR_DST_ALPHA: return VK_BLEND_FACTOR_DST_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_DST_ALPHA: return VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA;
        case SG_BLENDFACTOR_SRC_ALPHA_SATURATED: return VK_BLEND_FACTOR_SRC_ALPHA_SATURATE;
        case SG_BLENDFACTOR_BLEND_COLOR: return VK_BLEND_FACTOR_CONSTANT_COLOR;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_COLOR: return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR;
        case SG_BLENDFACTOR_BLEND_ALPHA: return VK_BLEND_FACTOR_CONSTANT_ALPHA;
        case SG_BLENDFACTOR_ONE_MINUS_BLEND_ALPHA: return VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA;
        default:
            SOKOL_UNREACHABLE;
            return VK_BLEND_FACTOR_ONE;
    }
}
_SOKOL_PRIVATE VkColorComponentFlags _sg_vk_color_write_mask(sg_color_mask m) {
    int res = 0;
    if (0 != (m & SG_COLORMASK_R)) {
        res |= (int)VK_COLOR_COMPONENT_R_BIT;
    }
    if (0 != (m & SG_COLORMASK_G)) {
        res |= (int)VK_COLOR_COMPONENT_G_BIT;
    }
    if (0 != (m & SG_COLORMASK_B)) {
        res |= (int)VK_COLOR_COMPONENT_B_BIT;
    }
    if (0 != (m & SG_COLORMASK_A)) {
        res |= (int)VK_COLOR_COMPONENT_A_BIT;
    }
    return (VkColorComponentFlags)res;
}
_SOKOL_PRIVATE VkShaderStageFlags _sg_vk_shader_stage(sg_shader_stage s) {
    switch (s) {
        case SG_SHADERSTAGE_VERTEX: return VK_SHADER_STAGE_VERTEX_BIT;
        case SG_SHADERSTAGE_FRAGMENT: return VK_SHADER_STAGE_FRAGMENT_BIT;
        case SG_SHADERSTAGE_COMPUTE: return VK_SHADER_STAGE_COMPUTE_BIT;
        default: SOKOL_UNREACHABLE; return 0;
    }
}
_SOKOL_PRIVATE VkAttachmentLoadOp _sg_vk_load_op(sg_load_action a) {
    switch (a) {
        case SG_LOADACTION_CLEAR:
            return VK_ATTACHMENT_LOAD_OP_CLEAR;
        case SG_LOADACTION_DONTCARE:
            return VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        default:
            return VK_ATTACHMENT_LOAD_OP_LOAD;
    }
}
_SOKOL_PRIVATE VkAttachmentStoreOp _sg_vk_store_op(sg_store_action a) {
    switch (a) {
        case SG_STOREACTION_STORE:
            return VK_ATTACHMENT_STORE_OP_STORE;
        default:
            return VK_ATTACHMENT_STORE_OP_DONT_CARE;
    }
}
_SOKOL_PRIVATE VkIndexType _sg_vk_index_type(sg_index_type t) {
    return (t == SG_INDEXTYPE_UINT16) ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
}
_SOKOL_PRIVATE VkImageViewType _sg_vk_texture_image_view_type(sg_image_type t) {
    switch (t) {
        case SG_IMAGETYPE_2D: return VK_IMAGE_VIEW_TYPE_2D;
        case SG_IMAGETYPE_CUBE: return VK_IMAGE_VIEW_TYPE_CUBE;
        case SG_IMAGETYPE_3D: return VK_IMAGE_VIEW_TYPE_3D;
        case SG_IMAGETYPE_ARRAY: return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        default: SOKOL_UNREACHABLE; return VK_IMAGE_VIEW_TYPE_2D;
    }
}
_SOKOL_PRIVATE VkImageViewType _sg_vk_attachment_image_view_type(sg_image_type t) {
    switch (t) {
        case SG_IMAGETYPE_2D: return VK_IMAGE_VIEW_TYPE_2D;
        case SG_IMAGETYPE_CUBE: return VK_IMAGE_VIEW_TYPE_2D_ARRAY; // not a bug
        case SG_IMAGETYPE_3D: return VK_IMAGE_VIEW_TYPE_2D; // not a bug
        case SG_IMAGETYPE_ARRAY: return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
        default: SOKOL_UNREACHABLE; return VK_IMAGE_VIEW_TYPE_2D;
    }
}
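// NOTE on the 'not a bug' cases above: render-pass attachments always target
// a single mip level and a single slice or face, so cube maps are attached
// through a 2D-array view (one layer per face), and 3D images through a 2D
// view of the selected depth slice (which presumably requires
// VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT, see the FIXME in
// _sg_vk_image_create_flags() above).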
_SOKOL_PRIVATE VkFilter _sg_vk_sampler_minmag_filter(sg_filter f) {
    switch (f) {
        case SG_FILTER_NEAREST: return VK_FILTER_NEAREST;
        case SG_FILTER_LINEAR: return VK_FILTER_LINEAR;
        default: SOKOL_UNREACHABLE; return VK_FILTER_NEAREST;
    }
}
_SOKOL_PRIVATE VkSamplerMipmapMode _sg_vk_sampler_mipmap_mode(sg_filter f) {
    switch (f) {
        case SG_FILTER_NEAREST: return VK_SAMPLER_MIPMAP_MODE_NEAREST;
        case SG_FILTER_LINEAR: return VK_SAMPLER_MIPMAP_MODE_LINEAR;
        default: SOKOL_UNREACHABLE; return VK_SAMPLER_MIPMAP_MODE_NEAREST;
    }
}
_SOKOL_PRIVATE VkSamplerAddressMode _sg_vk_sampler_address_mode(sg_wrap w) {
    switch (w) {
        case SG_WRAP_REPEAT: return VK_SAMPLER_ADDRESS_MODE_REPEAT;
        case SG_WRAP_CLAMP_TO_EDGE: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        case SG_WRAP_CLAMP_TO_BORDER: return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
        case SG_WRAP_MIRRORED_REPEAT: return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
        default: SOKOL_UNREACHABLE; return VK_SAMPLER_ADDRESS_MODE_REPEAT;
    }
}
_SOKOL_PRIVATE VkBorderColor _sg_vk_sampler_border_color(sg_border_color c) {
    switch (c) {
        case SG_BORDERCOLOR_TRANSPARENT_BLACK: return VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK;
        case SG_BORDERCOLOR_OPAQUE_BLACK: return VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
        case SG_BORDERCOLOR_OPAQUE_WHITE: return VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
        default: SOKOL_UNREACHABLE; return VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK;
    }
}
_SOKOL_PRIVATE void _sg_vk_load_ext_funcs(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    _sg.vk.ext.get_descriptor_set_layout_size = (PFN_vkGetDescriptorSetLayoutSizeEXT)vkGetDeviceProcAddr(_sg.vk.dev, "vkGetDescriptorSetLayoutSizeEXT");
    if (0 == _sg.vk.ext.get_descriptor_set_layout_size) {
        _SG_PANIC(VULKAN_REQUIRED_EXTENSION_FUNCTION_MISSING);
    }
    _sg.vk.ext.get_descriptor_set_layout_binding_offset = (PFN_vkGetDescriptorSetLayoutBindingOffsetEXT)vkGetDeviceProcAddr(_sg.vk.dev, "vkGetDescriptorSetLayoutBindingOffsetEXT");
    if (0 == _sg.vk.ext.get_descriptor_set_layout_binding_offset) {
        _SG_PANIC(VULKAN_REQUIRED_EXTENSION_FUNCTION_MISSING);
    }
    _sg.vk.ext.get_descriptor = (PFN_vkGetDescriptorEXT)vkGetDeviceProcAddr(_sg.vk.dev, "vkGetDescriptorEXT");
    if (0 == _sg.vk.ext.get_descriptor) {
        _SG_PANIC(VULKAN_REQUIRED_EXTENSION_FUNCTION_MISSING);
    }
    _sg.vk.ext.cmd_bind_descriptor_buffers = (PFN_vkCmdBindDescriptorBuffersEXT)vkGetDeviceProcAddr(_sg.vk.dev, "vkCmdBindDescriptorBuffersEXT");
    if (0 == _sg.vk.ext.cmd_bind_descriptor_buffers) {
        _SG_PANIC(VULKAN_REQUIRED_EXTENSION_FUNCTION_MISSING);
    }
    _sg.vk.ext.cmd_set_descriptor_buffer_offsets = (PFN_vkCmdSetDescriptorBufferOffsetsEXT)vkGetDeviceProcAddr(_sg.vk.dev, "vkCmdSetDescriptorBufferOffsetsEXT");
    if (0 == _sg.vk.ext.cmd_set_descriptor_buffer_offsets) {
        _SG_PANIC(VULKAN_REQUIRED_EXTENSION_FUNCTION_MISSING);
    }
}
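// NOTE: the function pointers loaded above belong to the
// VK_EXT_descriptor_buffer device extension and are not exported by the
// Vulkan loader, so they must be resolved at runtime via
// vkGetDeviceProcAddr(); a missing entry point means the device was created
// without the extension and is treated as a fatal error.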
  18486. _SOKOL_PRIVATE void _sg_vk_init_caps(void) {
  18487. _sg.backend = SG_BACKEND_VULKAN;
  18488. _sg.features.origin_top_left = true;
  18489. _sg.features.image_clamp_to_border = false; // FIXME?
  18490. _sg.features.mrt_independent_blend_state = true;
  18491. _sg.features.mrt_independent_write_mask = true;
  18492. _sg.features.compute = true;
  18493. _sg.features.msaa_texture_bindings = true;
  18494. _sg.features.draw_base_vertex = true;
  18495. _sg.features.draw_base_instance = true;
  18496. SOKOL_ASSERT(_sg.vk.phys_dev);
  18497. _sg.vk.descriptor_buffer_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_BUFFER_PROPERTIES_EXT;
  18498. _sg.vk.dev_props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
  18499. _sg.vk.dev_props.pNext = &_sg.vk.descriptor_buffer_props;
  18500. vkGetPhysicalDeviceProperties2(_sg.vk.phys_dev, &_sg.vk.dev_props);
  18501. _sg.vk.dev_features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
  18502. vkGetPhysicalDeviceFeatures2(_sg.vk.phys_dev, &_sg.vk.dev_features);
  18503. const VkPhysicalDeviceLimits* l = &_sg.vk.dev_props.properties.limits;
  18504. _sg.limits.max_image_size_2d = (int)l->maxImageDimension2D;
  18505. _sg.limits.max_image_size_cube = (int)l->maxImageDimensionCube;
  18506. _sg.limits.max_image_size_3d = (int)l->maxImageDimension3D;
  18507. _sg.limits.max_image_size_array = _sg.limits.max_image_size_2d;
  18508. _sg.limits.max_image_array_layers = (int)l->maxImageArrayLayers;
  18509. _sg.limits.max_vertex_attrs = _sg_min((int)l->maxVertexInputAttributes, SG_MAX_VERTEX_ATTRIBUTES);
  18510. _sg.limits.max_color_attachments = _sg_min((int)l->maxFragmentOutputAttachments, SG_MAX_COLOR_ATTACHMENTS);
    _sg.limits.max_texture_bindings_per_stage = _sg_min((int)l->maxPerStageDescriptorSampledImages, SG_MAX_VIEW_BINDSLOTS);
    _sg.limits.max_storage_buffer_bindings_per_stage = _sg_min((int)l->maxPerStageDescriptorStorageBuffers, SG_MAX_VIEW_BINDSLOTS);
    _sg.limits.max_storage_image_bindings_per_stage = _sg_min((int)l->maxPerStageDescriptorStorageImages, SG_MAX_VIEW_BINDSLOTS);
    _sg.limits.vk_min_uniform_buffer_offset_alignment = (int)l->minUniformBufferOffsetAlignment;
    // FIXME: currently these are the same as in the WebGPU backend
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_SRGB8A8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_BGRA8]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_R16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RG16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    _sg_pixelformat_all(&_sg.formats[SG_PIXELFORMAT_RGB10A2]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_R8SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG8SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RG11B10F]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R8UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG8UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R16UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R16SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG16UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG16SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_sr(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_sfr(&_sg.formats[SG_PIXELFORMAT_R32F]);
    _sg_pixelformat_sfr(&_sg.formats[SG_PIXELFORMAT_RG32F]);
    _sg_pixelformat_sfr(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH]);
    _sg_pixelformat_srmd(&_sg.formats[SG_PIXELFORMAT_DEPTH_STENCIL]);
    _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_RGB9E5]);
    if (_sg.vk.dev_features.features.textureCompressionBC) {
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC1_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC2_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC3_SRGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_R]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC4_RSN]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RG]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC5_RGSN]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBF]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC6H_RGBUF]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_BC7_SRGBA]);
    }
    if (_sg.vk.dev_features.features.textureCompressionETC2) {
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGB8A1]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_RGBA8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ETC2_SRGB8A8]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_R11SN]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_EAC_RG11SN]);
    }
    if (_sg.vk.dev_features.features.textureCompressionASTC_LDR) {
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_RGBA]);
        _sg_pixelformat_sf(&_sg.formats[SG_PIXELFORMAT_ASTC_4x4_SRGBA]);
    }
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SN]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA8SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA16F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_R32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RG32F]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32UI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32SI]);
    _sg_pixelformat_compute_all(&_sg.formats[SG_PIXELFORMAT_RGBA32F]);
}
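// frame-sync fences, one per inflight frame, created in signaled state so
// that the first wait in _sg_vk_acquire_frame_command_buffers() doesn't block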
_SOKOL_PRIVATE void _sg_vk_create_fences(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    _SG_STRUCT(VkFenceCreateInfo, create_info);
    create_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
    create_info.flags = VK_FENCE_CREATE_SIGNALED_BIT;
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        SOKOL_ASSERT(0 == _sg.vk.frame.slot[i].fence);
        VkResult res = vkCreateFence(_sg.vk.dev, &create_info, 0, &_sg.vk.frame.slot[i].fence);
        SOKOL_ASSERT((res == VK_SUCCESS) && _sg.vk.frame.slot[i].fence); _SOKOL_UNUSED(res);
    }
}
_SOKOL_PRIVATE void _sg_vk_destroy_fences(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        SOKOL_ASSERT(_sg.vk.frame.slot[i].fence);
        vkDestroyFence(_sg.vk.dev, _sg.vk.frame.slot[i].fence, 0);
        _sg.vk.frame.slot[i].fence = 0;
    }
}
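// each inflight frame owns one 'render' and one 'streaming-update' primary
// command buffer, all allocated from a shared command pool which is created
// with the RESET_COMMAND_BUFFER flag so that individual command buffers can
// be reset and re-recorded each frame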
_SOKOL_PRIVATE void _sg_vk_create_frame_command_pool_and_buffers(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(0 == _sg.vk.frame.cmd_pool);
    _SG_STRUCT(VkCommandPoolCreateInfo, pool_create_info);
    pool_create_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
    // FIXME: transient bit when the cmd buffers are reset each frame?
    pool_create_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
    pool_create_info.queueFamilyIndex = _sg.vk.queue_family_index;
    VkResult res = vkCreateCommandPool(_sg.vk.dev, &pool_create_info, 0, &_sg.vk.frame.cmd_pool);
    SOKOL_ASSERT((res == VK_SUCCESS) && _sg.vk.frame.cmd_pool); _SOKOL_UNUSED(res);
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        _SG_STRUCT(VkCommandBufferAllocateInfo, cmdbuf_alloc_info);
        cmdbuf_alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
        cmdbuf_alloc_info.commandPool = _sg.vk.frame.cmd_pool;
        cmdbuf_alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
        cmdbuf_alloc_info.commandBufferCount = 1;
        res = vkAllocateCommandBuffers(_sg.vk.dev, &cmdbuf_alloc_info, &_sg.vk.frame.slot[i].command_buffer);
        SOKOL_ASSERT((res == VK_SUCCESS) && _sg.vk.frame.slot[i].command_buffer);
        res = vkAllocateCommandBuffers(_sg.vk.dev, &cmdbuf_alloc_info, &_sg.vk.frame.slot[i].stream_command_buffer);
        SOKOL_ASSERT((res == VK_SUCCESS) && _sg.vk.frame.slot[i].stream_command_buffer);
    }
}
_SOKOL_PRIVATE void _sg_vk_destroy_frame_command_pool(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(_sg.vk.frame.cmd_pool);
    SOKOL_ASSERT(0 == _sg.vk.frame.cmd_buf);
    SOKOL_ASSERT(0 == _sg.vk.frame.stream_cmd_buf);
    // NOTE: command buffers owned by the pool will be automatically destroyed
    vkDestroyCommandPool(_sg.vk.dev, _sg.vk.frame.cmd_pool, 0);
    _sg.vk.frame.cmd_pool = 0;
    for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
        SOKOL_ASSERT(_sg.vk.frame.slot[i].command_buffer);
        _sg.vk.frame.slot[i].command_buffer = 0;
        _sg.vk.frame.slot[i].stream_command_buffer = 0;
    }
}
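// lazily called on the first pass of a frame: advances the frame slot,
// blocks until that slot's previous submission has finished (and collects
// the delete queue), then resets and begins recording both command buffers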
_SOKOL_PRIVATE void _sg_vk_acquire_frame_command_buffers(void) {
    SOKOL_ASSERT(_sg.vk.dev);
    VkResult res;
    if (0 == _sg.vk.frame.cmd_buf) {
        SOKOL_ASSERT(0 == _sg.vk.frame.stream_cmd_buf);
        _sg.vk.frame_slot = (_sg.vk.frame_slot + 1) % SG_NUM_INFLIGHT_FRAMES;
        // block until the oldest inflight frame has finished
        do {
            res = vkWaitForFences(_sg.vk.dev, 1, &_sg.vk.frame.slot[_sg.vk.frame_slot].fence, VK_TRUE, UINT64_MAX);
        } while (res == VK_TIMEOUT);
        if (res != VK_SUCCESS) {
            _SG_WARN(VULKAN_WAIT_FOR_FENCE_FAILED);
            _sg.cur_pass.valid = false;
            return;
        }
        res = vkResetFences(_sg.vk.dev, 1, &_sg.vk.frame.slot[_sg.vk.frame_slot].fence);
        SOKOL_ASSERT(res == VK_SUCCESS); _SOKOL_UNUSED(res);
        _sg_vk_delete_queue_collect();
        _sg.vk.frame.cmd_buf = _sg.vk.frame.slot[_sg.vk.frame_slot].command_buffer;
        res = vkResetCommandBuffer(_sg.vk.frame.cmd_buf, 0);
        SOKOL_ASSERT(res == VK_SUCCESS);
        _sg.vk.frame.stream_cmd_buf = _sg.vk.frame.slot[_sg.vk.frame_slot].stream_command_buffer;
        res = vkResetCommandBuffer(_sg.vk.frame.stream_cmd_buf, 0);
        SOKOL_ASSERT(res == VK_SUCCESS);
        _SG_STRUCT(VkCommandBufferBeginInfo, cmdbuf_begin_info);
        cmdbuf_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
        cmdbuf_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
        res = vkBeginCommandBuffer(_sg.vk.frame.cmd_buf, &cmdbuf_begin_info);
        SOKOL_ASSERT(res == VK_SUCCESS);
        res = vkBeginCommandBuffer(_sg.vk.frame.stream_cmd_buf, &cmdbuf_begin_info);
        SOKOL_ASSERT(res == VK_SUCCESS);
        _sg_vk_uniform_after_acquire();
        _sg_vk_bind_after_acquire();
        _sg_vk_staging_stream_after_acquire();
    }
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
}
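// submits both per-frame command buffers in a single vkQueueSubmit(): the
// streaming-update command buffer is submitted first (without semaphores),
// the render command buffer waits on the swapchain's present-complete
// semaphore and signals the render-finished semaphore; the frame slot's
// fence signals when both submissions have completed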
_SOKOL_PRIVATE void _sg_vk_submit_frame_command_buffers(void) {
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    SOKOL_ASSERT(_sg.vk.frame.stream_cmd_buf);
    VkResult res;
    _SOKOL_UNUSED(res);
    _sg_vk_staging_stream_before_submit();
    _sg_vk_bind_before_submit();
    _sg_vk_uniform_before_submit();
    res = vkEndCommandBuffer(_sg.vk.frame.stream_cmd_buf);
    SOKOL_ASSERT(res == VK_SUCCESS);
    res = vkEndCommandBuffer(_sg.vk.frame.cmd_buf);
    SOKOL_ASSERT(res == VK_SUCCESS);
    _SG_STRUCT(VkSubmitInfo, submit_infos[2]);
    // streaming-update command buffer
    submit_infos[0].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_infos[0].commandBufferCount = 1;
    submit_infos[0].pCommandBuffers = &_sg.vk.frame.stream_cmd_buf;
    // render command buffer
    const VkPipelineStageFlags present_wait_dst_stage_mask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    submit_infos[1].sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submit_infos[1].waitSemaphoreCount = 1;
    submit_infos[1].pWaitSemaphores = &_sg.vk.present_complete_sem;
    submit_infos[1].pWaitDstStageMask = &present_wait_dst_stage_mask;
    submit_infos[1].commandBufferCount = 1;
    submit_infos[1].pCommandBuffers = &_sg.vk.frame.cmd_buf;
    submit_infos[1].signalSemaphoreCount = 1;
    submit_infos[1].pSignalSemaphores = &_sg.vk.render_finished_sem;
    res = vkQueueSubmit(_sg.vk.queue, 2, submit_infos, _sg.vk.frame.slot[_sg.vk.frame_slot].fence);
    SOKOL_ASSERT(res == VK_SUCCESS);
    _sg.vk.frame.cmd_buf = 0;
    _sg.vk.frame.stream_cmd_buf = 0;
    // NOTE: it's valid to register resource objects for destruction in the
    // delete queue past this point (between _sg_vk_submit_frame_command_buffers()
    // and the next _sg_vk_acquire_frame_command_buffers()), since resources
    // destroyed in this 'gap' can at most have been used by the command
    // buffer that was just submitted
}
_SOKOL_PRIVATE void _sg_vk_setup_backend(const sg_desc* desc) {
    SOKOL_ASSERT(desc);
    SOKOL_ASSERT(desc->environment.vulkan.physical_device);
    SOKOL_ASSERT(desc->environment.vulkan.device);
    SOKOL_ASSERT(desc->environment.vulkan.queue);
    SOKOL_ASSERT(desc->uniform_buffer_size > 0);
    _sg.vk.valid = true;
    _sg.vk.phys_dev = (VkPhysicalDevice) desc->environment.vulkan.physical_device;
    _sg.vk.dev = (VkDevice) desc->environment.vulkan.device;
    _sg.vk.queue = (VkQueue) desc->environment.vulkan.queue;
    _sg.vk.queue_family_index = desc->environment.vulkan.queue_family_index;
    _sg_track_init(&_sg.vk.track.buffers, _sg.pools.buffer_pool.size);
    _sg_track_init(&_sg.vk.track.images, _sg.pools.image_pool.size);
    _sg_vk_load_ext_funcs();
    _sg_vk_init_caps();
    _sg_vk_create_fences();
    _sg_vk_create_frame_command_pool_and_buffers();
    _sg_vk_staging_copy_init();
    _sg_vk_staging_stream_init();
    _sg_vk_uniform_init();
    _sg_vk_bind_init();
    _sg_vk_create_delete_queues();
}
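// teardown happens in reverse order of _sg_vk_setup_backend(), after a
// vkDeviceWaitIdle() which guarantees that no resources are still in flight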
_SOKOL_PRIVATE void _sg_vk_discard_backend(void) {
    SOKOL_ASSERT(_sg.vk.valid);
    SOKOL_ASSERT(_sg.vk.dev);
    vkDeviceWaitIdle(_sg.vk.dev);
    _sg_vk_destroy_delete_queues();
    _sg_vk_bind_discard();
    _sg_vk_uniform_discard();
    _sg_vk_staging_stream_discard();
    _sg_vk_staging_copy_discard();
    _sg_vk_destroy_frame_command_pool();
    _sg_vk_destroy_fences();
    _sg_track_discard(&_sg.vk.track.images);
    _sg_track_discard(&_sg.vk.track.buffers);
    _sg.vk.valid = false;
}
_SOKOL_PRIVATE void _sg_vk_reset_state_cache(void) {
    // nothing to do here
}
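// buffer creation: create a VkBuffer with backing device memory; storage
// buffers additionally query their buffer device address (needed later to
// build storage-buffer descriptors), and immutable buffers with initial
// data are filled via the staging-copy path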
_SOKOL_PRIVATE sg_resource_state _sg_vk_create_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(buf && desc);
    SOKOL_ASSERT(buf->cmn.size > 0);
    SOKOL_ASSERT(0 == buf->vk.buf);
    SOKOL_ASSERT(0 == buf->vk.mem);
    SOKOL_ASSERT(0 == buf->vk.dev_addr);
    VkResult res;
    // FIXME: inject external buffer
    buf->vk.cur_access = _SG_VK_ACCESS_NONE;
    _SG_STRUCT(VkBufferCreateInfo, create_info);
    create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
    create_info.size = (VkDeviceSize)buf->cmn.size;
    create_info.usage = _sg_vk_buffer_usage(&buf->cmn.usage);
    create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    res = vkCreateBuffer(_sg.vk.dev, &create_info, 0, &buf->vk.buf);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_CREATE_BUFFER_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    SOKOL_ASSERT(buf->vk.buf);
    _sg_vk_set_object_label(VK_OBJECT_TYPE_BUFFER, (uint64_t)buf->vk.buf, desc->label);
    if (!_sg_vk_mem_alloc_buffer_device_memory(buf)) {
        return SG_RESOURCESTATE_FAILED;
    }
    SOKOL_ASSERT(buf->vk.mem);
    res = vkBindBufferMemory(_sg.vk.dev, buf->vk.buf, buf->vk.mem, 0);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_BIND_BUFFER_MEMORY_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    if (buf->cmn.usage.storage_buffer) {
        _SG_STRUCT(VkBufferDeviceAddressInfo, addr_info);
        addr_info.sType = VK_STRUCTURE_TYPE_BUFFER_DEVICE_ADDRESS_INFO;
        addr_info.buffer = buf->vk.buf;
        buf->vk.dev_addr = vkGetBufferDeviceAddress(_sg.vk.dev, &addr_info);
        SOKOL_ASSERT(buf->vk.dev_addr);
    }
    if (buf->cmn.usage.immutable && desc->data.ptr) {
        _sg_vk_staging_copy_buffer_data(buf, &desc->data, 0, false);
    }
    return SG_RESOURCESTATE_VALID;
}
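// NOTE: resource destruction is routed through the delete queue since the
// Vulkan objects may still be in use by an inflight command buffer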
_SOKOL_PRIVATE void _sg_vk_discard_buffer(_sg_buffer_t* buf) {
    SOKOL_ASSERT(buf);
    _sg_track_remove(&_sg.vk.track.buffers, buf->slot.id);
    if (buf->vk.buf) {
        _sg_vk_delete_queue_add(_sg_vk_buffer_destructor, (void*)buf->vk.buf);
        buf->vk.buf = 0;
    }
    if (buf->vk.mem) {
        _sg_vk_delete_queue_add(_sg_vk_memory_destructor, (void*)buf->vk.mem);
        buf->vk.mem = 0;
    }
}
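// image creation mirrors buffer creation; note that for 3D images num_slices
// maps to the depth dimension, for all other image types to array layers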
_SOKOL_PRIVATE sg_resource_state _sg_vk_create_image(_sg_image_t* img, const sg_image_desc* desc) {
    SOKOL_ASSERT(img && desc);
    VkResult res;
    // FIXME: injected images
    img->vk.cur_access = _SG_VK_ACCESS_NONE;
    _SG_STRUCT(VkImageCreateInfo, create_info);
    create_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
    create_info.flags = _sg_vk_image_create_flags(img->cmn.type);
    create_info.imageType = _sg_vk_image_type(img->cmn.type);
    create_info.format = _sg_vk_format(desc->pixel_format);
    create_info.extent.width = (uint32_t)img->cmn.width;
    create_info.extent.height = (uint32_t)img->cmn.height;
    if (desc->type == SG_IMAGETYPE_3D) {
        create_info.extent.depth = (uint32_t)img->cmn.num_slices;
        create_info.arrayLayers = 1;
    } else {
        create_info.extent.depth = 1;
        create_info.arrayLayers = (uint32_t)img->cmn.num_slices;
    }
    create_info.mipLevels = (uint32_t)img->cmn.num_mipmaps;
    create_info.samples = (VkSampleCountFlagBits)desc->sample_count;
    create_info.tiling = VK_IMAGE_TILING_OPTIMAL;
    create_info.usage = _sg_vk_image_usage(&img->cmn.usage);
    create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
    create_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    res = vkCreateImage(_sg.vk.dev, &create_info, 0, &img->vk.img);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_CREATE_IMAGE_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    SOKOL_ASSERT(img->vk.img);
    _sg_vk_set_object_label(VK_OBJECT_TYPE_IMAGE, (uint64_t)img->vk.img, desc->label);
    if (!_sg_vk_mem_alloc_image_device_memory(img)) {
        return SG_RESOURCESTATE_FAILED;
    }
    SOKOL_ASSERT(img->vk.mem);
    res = vkBindImageMemory(_sg.vk.dev, img->vk.img, img->vk.mem, 0);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_BIND_IMAGE_MEMORY_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    if (img->cmn.usage.immutable && desc->data.mip_levels[0].ptr) {
        _sg_vk_staging_copy_image_data(img, &desc->data, false);
    }
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_vk_discard_image(_sg_image_t* img) {
    SOKOL_ASSERT(img);
    _sg_track_remove(&_sg.vk.track.images, img->slot.id);
    if (img->vk.img) {
        _sg_vk_delete_queue_add(_sg_vk_image_destructor, (void*)img->vk.img);
        img->vk.img = 0;
    }
    if (img->vk.mem) {
        _sg_vk_delete_queue_add(_sg_vk_memory_destructor, (void*)img->vk.mem);
        img->vk.mem = 0;
    }
}
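// sampler creation also pre-records the sampler's descriptor data via the
// descriptor-buffer extension so that binding later only needs to copy the
// pre-recorded bytes into a descriptor buffer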
_SOKOL_PRIVATE sg_resource_state _sg_vk_create_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
    SOKOL_ASSERT(smp && desc);
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(0 == smp->vk.smp);
    // FIXME: injection
    // create sampler object
    _SG_STRUCT(VkSamplerCreateInfo, create_info);
    create_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    create_info.magFilter = _sg_vk_sampler_minmag_filter(desc->mag_filter);
    create_info.minFilter = _sg_vk_sampler_minmag_filter(desc->min_filter);
    create_info.mipmapMode = _sg_vk_sampler_mipmap_mode(desc->mipmap_filter);
    create_info.addressModeU = _sg_vk_sampler_address_mode(desc->wrap_u);
    create_info.addressModeV = _sg_vk_sampler_address_mode(desc->wrap_v);
    create_info.addressModeW = _sg_vk_sampler_address_mode(desc->wrap_w);
    create_info.mipLodBias = 0.0f;
    if (desc->max_anisotropy > 1) {
        create_info.anisotropyEnable = VK_TRUE;
        create_info.maxAnisotropy = (float)desc->max_anisotropy;
    }
    if (desc->compare != SG_COMPAREFUNC_NEVER) {
        create_info.compareEnable = VK_TRUE;
        create_info.compareOp = _sg_vk_compare_op(desc->compare);
    }
    create_info.minLod = desc->min_lod;
    create_info.maxLod = desc->max_lod;
    create_info.borderColor = _sg_vk_sampler_border_color(desc->border_color);
    VkResult res = vkCreateSampler(_sg.vk.dev, &create_info, 0, &smp->vk.smp);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_CREATE_SAMPLER_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    SOKOL_ASSERT(smp->vk.smp);
    _sg_vk_set_object_label(VK_OBJECT_TYPE_SAMPLER, (uint64_t)smp->vk.smp, desc->label);
    // record sampler descriptor data
    smp->vk.descriptor_size = _sg.vk.descriptor_buffer_props.samplerDescriptorSize;
    if (_SG_VK_MAX_DESCRIPTOR_DATA_SIZE < smp->vk.descriptor_size) {
        _SG_ERROR(VULKAN_SAMPLER_MAX_DESCRIPTOR_SIZE);
        return SG_RESOURCESTATE_FAILED;
    }
    _SG_STRUCT(VkDescriptorGetInfoEXT, get_info);
    get_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT;
    get_info.type = VK_DESCRIPTOR_TYPE_SAMPLER;
    get_info.data.pSampler = &smp->vk.smp;
    _sg.vk.ext.get_descriptor(_sg.vk.dev, &get_info, smp->vk.descriptor_size, &smp->vk.descriptor_data);
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_vk_discard_sampler(_sg_sampler_t* smp) {
    SOKOL_ASSERT(smp);
    if (smp->vk.smp) {
        _sg_vk_delete_queue_add(_sg_vk_sampler_destructor, (void*)smp->vk.smp);
        smp->vk.smp = 0;
    }
}
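// creates a VkShaderModule from SPIR-V bytecode (the Vulkan backend doesn't
// support runtime shader source compilation)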
_SOKOL_PRIVATE _sg_vk_shader_func_t _sg_vk_create_shader_func(const sg_shader_function* func, const char* label) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(func);
    SOKOL_ASSERT(func->bytecode.ptr && (func->bytecode.size > 0));
    SOKOL_ASSERT(func->entry);
    _SG_STRUCT(_sg_vk_shader_func_t, vk_func);
    _sg_strcpy(&vk_func.entry, func->entry);
    _SG_STRUCT(VkShaderModuleCreateInfo, create_info);
    create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
    create_info.codeSize = func->bytecode.size;
    create_info.pCode = (const uint32_t*)func->bytecode.ptr;
    VkResult res = vkCreateShaderModule(_sg.vk.dev, &create_info, 0, &vk_func.module);
    if (VK_SUCCESS != res) {
        _SG_ERROR(VULKAN_CREATE_SHADER_MODULE_FAILED);
    } else {
        SOKOL_ASSERT(vk_func.module);
        _sg_vk_set_object_label(VK_OBJECT_TYPE_SHADER_MODULE, (uint64_t)vk_func.module, label);
    }
    return vk_func;
}
_SOKOL_PRIVATE void _sg_vk_discard_shader_func(_sg_vk_shader_func_t* func) {
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(func);
    if (func->module) {
        _sg_vk_delete_queue_add(_sg_vk_shader_module_destructor, (void*)func->module);
        func->module = 0;
    }
}
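// check that the SPIR-V bindings declared in the shader desc fit into the
// backend's fixed descriptor set layout (set 0: uniform blocks, set 1:
// texture/storage views and samplers)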
_SOKOL_PRIVATE bool _sg_vk_ensure_spirv_bindslot_ranges(const sg_shader_desc* desc) {
    SOKOL_ASSERT(desc);
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        const sg_shader_uniform_block* ub = &desc->uniform_blocks[i];
        if (ub->stage != SG_SHADERSTAGE_NONE) {
            if (ub->spirv_set0_binding_n >= _SG_VK_MAX_UB_DESCRIPTORSET_SLOTS) {
                _SG_ERROR(VULKAN_UNIFORMBLOCK_SPIRV_SET0_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        const sg_shader_view* view = &desc->views[i];
        if (view->texture.stage != SG_SHADERSTAGE_NONE) {
            if (view->texture.spirv_set1_binding_n >= _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_SLOTS) {
                _SG_ERROR(VULKAN_TEXTURE_SPIRV_SET1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_buffer.spirv_set1_binding_n >= _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_SLOTS) {
                _SG_ERROR(VULKAN_STORAGEBUFFER_SPIRV_SET1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
        if (view->storage_image.stage != SG_SHADERSTAGE_NONE) {
            if (view->storage_image.spirv_set1_binding_n >= _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_SLOTS) {
                _SG_ERROR(VULKAN_STORAGEIMAGE_SPIRV_SET1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        const sg_shader_sampler* smp = &desc->samplers[i];
        if (smp->stage != SG_SHADERSTAGE_NONE) {
            if (smp->spirv_set1_binding_n >= _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_SLOTS) {
                _SG_ERROR(VULKAN_SAMPLER_SPIRV_SET1_BINDING_OUT_OF_RANGE);
                return false;
            }
        }
    }
    return true;
}
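// shader creation builds two descriptor set layouts (set 0 for uniform
// blocks, set 1 for views and samplers) plus the pipeline layout, and caches
// the descriptor set sizes and per-binding offsets which are needed later
// when writing descriptors into descriptor buffers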
_SOKOL_PRIVATE sg_resource_state _sg_vk_create_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
    SOKOL_ASSERT(shd && desc);
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(shd->vk.vertex_func.module == 0);
    SOKOL_ASSERT(shd->vk.fragment_func.module == 0);
    SOKOL_ASSERT(shd->vk.compute_func.module == 0);
    SOKOL_ASSERT(shd->vk.ub_dsl == 0);
    SOKOL_ASSERT(shd->vk.view_smp_dsl == 0);
    if (!_sg_vk_ensure_spirv_bindslot_ranges(desc)) {
        return SG_RESOURCESTATE_FAILED;
    }
    // build shader modules
    bool shd_valid = true;
    if (desc->vertex_func.bytecode.ptr) {
        shd->vk.vertex_func = _sg_vk_create_shader_func(&desc->vertex_func, desc->label);
        shd_valid &= shd->vk.vertex_func.module != 0;
    }
    if (desc->fragment_func.bytecode.ptr) {
        shd->vk.fragment_func = _sg_vk_create_shader_func(&desc->fragment_func, desc->label);
        shd_valid &= shd->vk.fragment_func.module != 0;
    }
    if (desc->compute_func.bytecode.ptr) {
        shd->vk.compute_func = _sg_vk_create_shader_func(&desc->compute_func, desc->label);
        shd_valid &= shd->vk.compute_func.module != 0;
    }
    if (!shd_valid) {
        _sg_vk_discard_shader_func(&shd->vk.vertex_func);
        _sg_vk_discard_shader_func(&shd->vk.fragment_func);
        _sg_vk_discard_shader_func(&shd->vk.compute_func);
        return SG_RESOURCESTATE_FAILED;
    }
    // descriptor set layouts and pipeline layout
    VkResult res;
    _SG_STRUCT(VkDescriptorSetLayoutBinding, dsl_entries[_SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_ENTRIES]);
    _SG_STRUCT(VkDescriptorSetLayoutCreateInfo, dsl_create_info);
    size_t dsl_index = 0;
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        if (shd->cmn.uniform_blocks[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        shd->vk.ub_set0_bnd_n[i] = desc->uniform_blocks[i].spirv_set0_binding_n;
        VkDescriptorSetLayoutBinding* dsl_entry = &dsl_entries[dsl_index];
        dsl_entry->binding = shd->vk.ub_set0_bnd_n[i];
        dsl_entry->descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        dsl_entry->descriptorCount = 1;
        dsl_entry->stageFlags = _sg_vk_shader_stage(shd->cmn.uniform_blocks[i].stage);
        dsl_index += 1;
    }
    dsl_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    dsl_create_info.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT;
    dsl_create_info.bindingCount = (uint32_t)dsl_index;
    dsl_create_info.pBindings = dsl_entries;
    res = vkCreateDescriptorSetLayout(_sg.vk.dev, &dsl_create_info, 0, &shd->vk.ub_dsl);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_CREATE_DESCRIPTOR_SET_LAYOUT_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    // store uniform descriptor set size and descriptor offsets
    _sg.vk.ext.get_descriptor_set_layout_size(_sg.vk.dev, shd->vk.ub_dsl, &shd->vk.ub_dset_size);
    for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
        if (shd->cmn.uniform_blocks[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const uint8_t vk_bnd = shd->vk.ub_set0_bnd_n[i];
        VkDeviceSize dset_offset = 0;
        _sg.vk.ext.get_descriptor_set_layout_binding_offset(_sg.vk.dev, shd->vk.ub_dsl, vk_bnd, &dset_offset);
        shd->vk.ub_dset_offsets[i] = dset_offset;
    }
    _sg_clear(dsl_entries, sizeof(dsl_entries));
    _sg_clear(&dsl_create_info, sizeof(dsl_create_info));
    dsl_index = 0;
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        SOKOL_ASSERT(dsl_index < _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_ENTRIES);
        VkDescriptorSetLayoutBinding* dsl_entry = &dsl_entries[dsl_index];
        dsl_entry->stageFlags = _sg_vk_shader_stage(shd->cmn.views[i].stage);
        if (shd->cmn.views[i].view_type == SG_VIEWTYPE_TEXTURE) {
            shd->vk.view_set1_bnd_n[i] = desc->views[i].texture.spirv_set1_binding_n;
            dsl_entry->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        } else if (shd->cmn.views[i].view_type == SG_VIEWTYPE_STORAGEBUFFER) {
            shd->vk.view_set1_bnd_n[i] = desc->views[i].storage_buffer.spirv_set1_binding_n;
            dsl_entry->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        } else if (shd->cmn.views[i].view_type == SG_VIEWTYPE_STORAGEIMAGE) {
            shd->vk.view_set1_bnd_n[i] = desc->views[i].storage_image.spirv_set1_binding_n;
            dsl_entry->descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
        } else {
            SOKOL_UNREACHABLE;
        }
        dsl_entry->binding = shd->vk.view_set1_bnd_n[i];
        dsl_entry->descriptorCount = 1;
        dsl_index += 1;
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        if (shd->cmn.samplers[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        shd->vk.smp_set1_bnd_n[i] = desc->samplers[i].spirv_set1_binding_n;
        SOKOL_ASSERT(dsl_index < _SG_VK_MAX_VIEW_SMP_DESCRIPTORSET_ENTRIES);
        VkDescriptorSetLayoutBinding* dsl_entry = &dsl_entries[dsl_index];
        dsl_entry->binding = shd->vk.smp_set1_bnd_n[i];
        dsl_entry->descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
        dsl_entry->descriptorCount = 1;
        dsl_entry->stageFlags = _sg_vk_shader_stage(shd->cmn.samplers[i].stage);
        dsl_index += 1;
    }
    dsl_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
    dsl_create_info.flags = VK_DESCRIPTOR_SET_LAYOUT_CREATE_DESCRIPTOR_BUFFER_BIT_EXT;
    dsl_create_info.bindingCount = (uint32_t)dsl_index;
    dsl_create_info.pBindings = dsl_entries;
    res = vkCreateDescriptorSetLayout(_sg.vk.dev, &dsl_create_info, 0, &shd->vk.view_smp_dsl);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_CREATE_DESCRIPTOR_SET_LAYOUT_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    // store view/smp descriptor set size and descriptor offsets
    _sg.vk.ext.get_descriptor_set_layout_size(_sg.vk.dev, shd->vk.view_smp_dsl, &shd->vk.view_smp_dset_size);
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const uint8_t vk_bnd = shd->vk.view_set1_bnd_n[i];
        VkDeviceSize dset_offset = 0;
        _sg.vk.ext.get_descriptor_set_layout_binding_offset(_sg.vk.dev, shd->vk.view_smp_dsl, vk_bnd, &dset_offset);
        shd->vk.view_dset_offsets[i] = dset_offset;
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        if (shd->cmn.samplers[i].stage == SG_SHADERSTAGE_NONE) {
            continue;
        }
        const uint8_t vk_bnd = shd->vk.smp_set1_bnd_n[i];
        VkDeviceSize dset_offset = 0;
        _sg.vk.ext.get_descriptor_set_layout_binding_offset(_sg.vk.dev, shd->vk.view_smp_dsl, vk_bnd, &dset_offset);
        shd->vk.smp_dset_offsets[i] = dset_offset;
    }
    VkDescriptorSetLayout set_layouts[_SG_VK_NUM_DESCRIPTORSETS] = {
        shd->vk.ub_dsl,
        shd->vk.view_smp_dsl,
    };
    _SG_STRUCT(VkPipelineLayoutCreateInfo, pl_create_info);
    pl_create_info.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
    pl_create_info.setLayoutCount = _SG_VK_NUM_DESCRIPTORSETS;
    pl_create_info.pSetLayouts = set_layouts;
    res = vkCreatePipelineLayout(_sg.vk.dev, &pl_create_info, 0, &shd->vk.pip_layout);
    if (res != VK_SUCCESS) {
        _SG_ERROR(VULKAN_CREATE_PIPELINE_LAYOUT_FAILED);
        return SG_RESOURCESTATE_FAILED;
    }
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_vk_discard_shader(_sg_shader_t* shd) {
    SOKOL_ASSERT(shd);
    SOKOL_ASSERT(_sg.vk.dev);
    _sg_vk_discard_shader_func(&shd->vk.vertex_func);
    _sg_vk_discard_shader_func(&shd->vk.fragment_func);
    _sg_vk_discard_shader_func(&shd->vk.compute_func);
    if (shd->vk.pip_layout) {
        _sg_vk_delete_queue_add(_sg_vk_pipelinelayout_destructor, (void*)shd->vk.pip_layout);
        shd->vk.pip_layout = 0;
    }
    if (shd->vk.ub_dsl) {
        _sg_vk_delete_queue_add(_sg_vk_descriptorsetlayout_destructor, (void*)shd->vk.ub_dsl);
        shd->vk.ub_dsl = 0;
    }
    if (shd->vk.view_smp_dsl) {
        _sg_vk_delete_queue_add(_sg_vk_descriptorsetlayout_destructor, (void*)shd->vk.view_smp_dsl);
        shd->vk.view_smp_dsl = 0;
    }
}
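// compute pipelines only need the compute shader module and pipeline layout,
// graphics pipelines additionally translate the sokol-gfx render state;
// attachment formats are provided via VkPipelineRenderingCreateInfo (dynamic
// rendering), and viewport/scissor are dynamic state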
_SOKOL_PRIVATE sg_resource_state _sg_vk_create_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(pip && desc);
    SOKOL_ASSERT(_sg.vk.dev);
    VkResult res;
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    SOKOL_ASSERT(shd->vk.pip_layout);
    if (pip->cmn.is_compute) {
        SOKOL_ASSERT(shd->vk.compute_func.module);
        _SG_STRUCT(VkComputePipelineCreateInfo, pip_create_info);
        pip_create_info.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
        pip_create_info.flags = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT;
        pip_create_info.stage.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
        pip_create_info.stage.stage = VK_SHADER_STAGE_COMPUTE_BIT;
        pip_create_info.stage.module = shd->vk.compute_func.module;
        pip_create_info.stage.pName = shd->vk.compute_func.entry.buf;
        pip_create_info.layout = shd->vk.pip_layout;
        res = vkCreateComputePipelines(_sg.vk.dev, VK_NULL_HANDLE, 1, &pip_create_info, 0, &pip->vk.pip);
        if (res != VK_SUCCESS) {
            _SG_ERROR(VULKAN_CREATE_COMPUTE_PIPELINE_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
    } else {
        uint32_t num_stages = 0;
        _SG_STRUCT(VkPipelineShaderStageCreateInfo, stages[2]);
        if (shd->vk.vertex_func.module) {
            stages[num_stages].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
            stages[num_stages].stage = VK_SHADER_STAGE_VERTEX_BIT;
            stages[num_stages].module = shd->vk.vertex_func.module;
            stages[num_stages].pName = shd->vk.vertex_func.entry.buf;
            num_stages += 1;
        }
        if (shd->vk.fragment_func.module) {
            stages[num_stages].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
            stages[num_stages].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
            stages[num_stages].module = shd->vk.fragment_func.module;
            stages[num_stages].pName = shd->vk.fragment_func.entry.buf;
            num_stages += 1;
        }
        uint32_t num_vtx_bnds = 0;
        _SG_STRUCT(VkVertexInputBindingDescription, vtx_bnds[SG_MAX_VERTEXBUFFER_BINDSLOTS]);
        for (uint32_t vbl_idx = 0; vbl_idx < SG_MAX_VERTEXBUFFER_BINDSLOTS; vbl_idx++, num_vtx_bnds++) {
            const sg_vertex_buffer_layout_state* vbl_state = &desc->layout.buffers[vbl_idx];
            if (0 == vbl_state->stride) {
                break;
            }
            vtx_bnds[vbl_idx].binding = vbl_idx;
            vtx_bnds[vbl_idx].stride = (uint32_t)vbl_state->stride;
            vtx_bnds[vbl_idx].inputRate = _sg_vk_vertex_input_rate(vbl_state->step_func);
        }
        uint32_t num_vtx_attrs = 0;
        _SG_STRUCT(VkVertexInputAttributeDescription, vtx_attrs[SG_MAX_VERTEX_ATTRIBUTES]);
        for (uint32_t va_idx = 0; va_idx < SG_MAX_VERTEX_ATTRIBUTES; va_idx++, num_vtx_attrs++) {
            const sg_vertex_attr_state* va_state = &desc->layout.attrs[va_idx];
            if (SG_VERTEXFORMAT_INVALID == va_state->format) {
                break;
            }
            const uint32_t vbl_idx = (uint32_t)va_state->buffer_index;
            SOKOL_ASSERT(vbl_idx < SG_MAX_VERTEXBUFFER_BINDSLOTS);
            SOKOL_ASSERT(pip->cmn.vertex_buffer_layout_active[vbl_idx]);
            vtx_attrs[va_idx].location = va_idx;
            vtx_attrs[va_idx].binding = vbl_idx;
            vtx_attrs[va_idx].format = _sg_vk_vertex_format(va_state->format);
            vtx_attrs[va_idx].offset = (uint32_t)va_state->offset;
        }
        _SG_STRUCT(VkPipelineVertexInputStateCreateInfo, vi_state);
        vi_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
        vi_state.vertexBindingDescriptionCount = num_vtx_bnds;
        vi_state.pVertexBindingDescriptions = vtx_bnds;
        vi_state.vertexAttributeDescriptionCount = num_vtx_attrs;
        vi_state.pVertexAttributeDescriptions = vtx_attrs;
        _SG_STRUCT(VkPipelineInputAssemblyStateCreateInfo, ia_state);
        ia_state.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
        ia_state.topology = _sg_vk_primitive_topology(desc->primitive_type);
        ia_state.primitiveRestartEnable = VK_FALSE; // FIXME: needs the 'primitiveTopologyRestart' feature enabled
        _SG_STRUCT(VkPipelineViewportStateCreateInfo, vp_state);
        vp_state.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
        vp_state.viewportCount = 1;
        vp_state.scissorCount = 1;
        _SG_STRUCT(VkPipelineRasterizationStateCreateInfo, rs_state);
        rs_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
        rs_state.depthClampEnable = false;
        rs_state.rasterizerDiscardEnable = false;
        rs_state.polygonMode = VK_POLYGON_MODE_FILL;
        rs_state.cullMode = _sg_vk_cullmode(desc->cull_mode);
        rs_state.frontFace = _sg_vk_frontface(desc->face_winding);
        rs_state.depthBiasEnable = desc->depth.bias != 0.0f;
        rs_state.depthBiasConstantFactor = desc->depth.bias;
        rs_state.depthBiasClamp = desc->depth.bias_clamp;
        rs_state.depthBiasSlopeFactor = desc->depth.bias_slope_scale;
        rs_state.lineWidth = 1.0f;
        _SG_STRUCT(VkPipelineMultisampleStateCreateInfo, ms_state);
        ms_state.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
        ms_state.rasterizationSamples = (VkSampleCountFlagBits)desc->sample_count;
        ms_state.alphaToCoverageEnable = desc->alpha_to_coverage_enabled;
        _SG_STRUCT(VkPipelineDepthStencilStateCreateInfo, ds_state);
        ds_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
        // NOTE: in Vulkan, depth writes only happen while the depth test is enabled
        ds_state.depthTestEnable = (desc->depth.compare != SG_COMPAREFUNC_ALWAYS) || desc->depth.write_enabled;
        ds_state.depthWriteEnable = desc->depth.write_enabled;
        ds_state.depthCompareOp = _sg_vk_compare_op(desc->depth.compare);
        ds_state.depthBoundsTestEnable = false;
        ds_state.stencilTestEnable = desc->stencil.enabled;
        ds_state.front.failOp = _sg_vk_stencil_op(desc->stencil.front.fail_op);
        ds_state.front.passOp = _sg_vk_stencil_op(desc->stencil.front.pass_op);
        ds_state.front.depthFailOp = _sg_vk_stencil_op(desc->stencil.front.depth_fail_op);
        ds_state.front.compareOp = _sg_vk_compare_op(desc->stencil.front.compare);
        ds_state.front.compareMask = desc->stencil.read_mask;
        ds_state.front.writeMask = desc->stencil.write_mask;
        ds_state.front.reference = desc->stencil.ref;
        ds_state.back.failOp = _sg_vk_stencil_op(desc->stencil.back.fail_op);
        ds_state.back.passOp = _sg_vk_stencil_op(desc->stencil.back.pass_op);
        ds_state.back.depthFailOp = _sg_vk_stencil_op(desc->stencil.back.depth_fail_op);
        ds_state.back.compareOp = _sg_vk_compare_op(desc->stencil.back.compare);
        ds_state.back.compareMask = desc->stencil.read_mask;
        ds_state.back.writeMask = desc->stencil.write_mask;
        ds_state.back.reference = desc->stencil.ref;
        _SG_STRUCT(VkPipelineColorBlendAttachmentState, att_states[SG_MAX_COLOR_ATTACHMENTS]);
        SOKOL_ASSERT(desc->color_count <= SG_MAX_COLOR_ATTACHMENTS);
        for (int i = 0; i < desc->color_count; i++) {
            att_states[i].blendEnable = desc->colors[i].blend.enabled;
            att_states[i].srcColorBlendFactor = _sg_vk_blend_factor(desc->colors[i].blend.src_factor_rgb);
            att_states[i].dstColorBlendFactor = _sg_vk_blend_factor(desc->colors[i].blend.dst_factor_rgb);
            att_states[i].colorBlendOp = _sg_vk_blend_op(desc->colors[i].blend.op_rgb);
            att_states[i].srcAlphaBlendFactor = _sg_vk_blend_factor(desc->colors[i].blend.src_factor_alpha);
            att_states[i].dstAlphaBlendFactor = _sg_vk_blend_factor(desc->colors[i].blend.dst_factor_alpha);
            att_states[i].alphaBlendOp = _sg_vk_blend_op(desc->colors[i].blend.op_alpha);
            att_states[i].colorWriteMask = _sg_vk_color_write_mask(desc->colors[i].write_mask);
        }
        _SG_STRUCT(VkPipelineColorBlendStateCreateInfo, cb_state);
        cb_state.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
        cb_state.logicOpEnable = false;
        cb_state.attachmentCount = (uint32_t)desc->color_count;
        cb_state.pAttachments = att_states;
        cb_state.blendConstants[0] = desc->blend_color.r;
        cb_state.blendConstants[1] = desc->blend_color.g;
        cb_state.blendConstants[2] = desc->blend_color.b;
        cb_state.blendConstants[3] = desc->blend_color.a;
        _SG_STRUCT(VkFormat, color_formats[SG_MAX_COLOR_ATTACHMENTS]);
        SOKOL_ASSERT(desc->color_count <= SG_MAX_COLOR_ATTACHMENTS);
        for (int i = 0; i < desc->color_count; i++) {
            color_formats[i] = _sg_vk_format(desc->colors[i].pixel_format);
        }
        _SG_STRUCT(VkPipelineRenderingCreateInfo, rnd_state);
        rnd_state.sType = VK_STRUCTURE_TYPE_PIPELINE_RENDERING_CREATE_INFO;
        rnd_state.colorAttachmentCount = (uint32_t)desc->color_count;
        rnd_state.pColorAttachmentFormats = color_formats;
        rnd_state.depthAttachmentFormat = _sg_vk_format(desc->depth.pixel_format);
        if (_sg_is_depth_stencil_format(desc->depth.pixel_format)) {
            rnd_state.stencilAttachmentFormat = _sg_vk_format(desc->depth.pixel_format);
        } else {
            rnd_state.stencilAttachmentFormat = VK_FORMAT_UNDEFINED;
        }
        VkDynamicState dyn_states[2] = {
            VK_DYNAMIC_STATE_VIEWPORT,
            VK_DYNAMIC_STATE_SCISSOR,
        };
        _SG_STRUCT(VkPipelineDynamicStateCreateInfo, dyn_state);
        dyn_state.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
        dyn_state.dynamicStateCount = 2;
        dyn_state.pDynamicStates = dyn_states;
        _SG_STRUCT(VkGraphicsPipelineCreateInfo, pip_create_info);
        pip_create_info.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
        pip_create_info.pNext = &rnd_state;
        pip_create_info.flags = VK_PIPELINE_CREATE_DESCRIPTOR_BUFFER_BIT_EXT;
        pip_create_info.stageCount = num_stages;
        pip_create_info.pStages = stages;
        pip_create_info.pVertexInputState = &vi_state;
        pip_create_info.pInputAssemblyState = &ia_state;
        pip_create_info.pViewportState = &vp_state;
        pip_create_info.pRasterizationState = &rs_state;
        pip_create_info.pMultisampleState = &ms_state;
        pip_create_info.pDepthStencilState = &ds_state;
        pip_create_info.pColorBlendState = &cb_state;
        pip_create_info.pDynamicState = &dyn_state;
        pip_create_info.layout = shd->vk.pip_layout;
        res = vkCreateGraphicsPipelines(_sg.vk.dev, VK_NULL_HANDLE, 1, &pip_create_info, 0, &pip->vk.pip);
        if (res != VK_SUCCESS) {
            _SG_ERROR(VULKAN_CREATE_GRAPHICS_PIPELINE_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
    }
    SOKOL_ASSERT(pip->vk.pip);
    _sg_vk_set_object_label(VK_OBJECT_TYPE_PIPELINE, (uint64_t)pip->vk.pip, desc->label);
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_vk_discard_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    if (pip->vk.pip) {
        _sg_vk_delete_queue_add(_sg_vk_pipeline_destructor, (void*)pip->vk.pip);
        pip->vk.pip = 0;
    }
}
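// storage-buffer views don't create a Vulkan object, they only record
// descriptor data derived from the buffer's device address; all other view
// types create a VkImageView, and texture- and storage-image views also
// record their descriptor data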
_SOKOL_PRIVATE sg_resource_state _sg_vk_create_view(_sg_view_t* view, const sg_view_desc* desc) {
    SOKOL_ASSERT(view && desc);
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(0 == view->vk.img_view);
    VkResult res;
    _SG_STRUCT(VkDescriptorGetInfoEXT, get_info);
    get_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_GET_INFO_EXT;
    if (view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
        // record descriptor data for storage buffer
        view->vk.descriptor_size = _sg.vk.descriptor_buffer_props.storageBufferDescriptorSize;
        if (_SG_VK_MAX_DESCRIPTOR_DATA_SIZE < view->vk.descriptor_size) {
            _SG_ERROR(VULKAN_VIEW_MAX_DESCRIPTOR_SIZE);
            return SG_RESOURCESTATE_FAILED;
        }
        const _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
        SOKOL_ASSERT(buf->vk.dev_addr);
        _SG_STRUCT(VkDescriptorAddressInfoEXT, addr_info);
        addr_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_ADDRESS_INFO_EXT;
        addr_info.address = buf->vk.dev_addr + (VkDeviceSize)view->cmn.buf.offset;
        addr_info.range = (VkDeviceSize)(buf->cmn.size - view->cmn.buf.offset);
        get_info.type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        get_info.data.pStorageBuffer = &addr_info;
        _sg.vk.ext.get_descriptor(_sg.vk.dev, &get_info, view->vk.descriptor_size, &view->vk.descriptor_data);
    } else {
        // create image view object
        const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
        SOKOL_ASSERT(img->vk.img);
        SOKOL_ASSERT(view->cmn.img.mip_level_count >= 1);
        SOKOL_ASSERT(view->cmn.img.slice_count >= 1);
        _SG_STRUCT(VkImageViewCreateInfo, create_info);
        create_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
        create_info.image = img->vk.img;
        if (view->cmn.type == SG_VIEWTYPE_TEXTURE) {
            create_info.viewType = _sg_vk_texture_image_view_type(img->cmn.type);
        } else {
            create_info.viewType = _sg_vk_attachment_image_view_type(img->cmn.type);
        }
        create_info.format = _sg_vk_format(img->cmn.pixel_format);
        if (view->cmn.type == SG_VIEWTYPE_DEPTHSTENCILATTACHMENT) {
            create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
            if (_sg_is_depth_stencil_format(img->cmn.pixel_format)) {
                create_info.subresourceRange.aspectMask |= VK_IMAGE_ASPECT_STENCIL_BIT;
            }
        } else if (_sg_is_depth_or_depth_stencil_format(img->cmn.pixel_format)) {
            create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        } else {
            create_info.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        }
        create_info.subresourceRange.baseMipLevel = (uint32_t)view->cmn.img.mip_level;
        create_info.subresourceRange.levelCount = (uint32_t)view->cmn.img.mip_level_count;
        create_info.subresourceRange.baseArrayLayer = (uint32_t)view->cmn.img.slice;
        create_info.subresourceRange.layerCount = (uint32_t)view->cmn.img.slice_count;
        res = vkCreateImageView(_sg.vk.dev, &create_info, 0, &view->vk.img_view);
        if (res != VK_SUCCESS) {
            _SG_ERROR(VULKAN_CREATE_IMAGE_VIEW_FAILED);
            return SG_RESOURCESTATE_FAILED;
        }
        SOKOL_ASSERT(view->vk.img_view);
        _sg_vk_set_object_label(VK_OBJECT_TYPE_IMAGE_VIEW, (uint64_t)view->vk.img_view, desc->label);
        // record descriptor data for storage images and textures
        if ((view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE) || (view->cmn.type == SG_VIEWTYPE_TEXTURE)) {
            _SG_STRUCT(VkDescriptorImageInfo, img_info);
            img_info.imageView = view->vk.img_view;
            if (view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE) {
                view->vk.descriptor_size = _sg.vk.descriptor_buffer_props.storageImageDescriptorSize;
                img_info.imageLayout = VK_IMAGE_LAYOUT_GENERAL;
                get_info.type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
                get_info.data.pStorageImage = &img_info;
            } else {
                view->vk.descriptor_size = _sg.vk.descriptor_buffer_props.sampledImageDescriptorSize;
                img_info.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
                get_info.type = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
                get_info.data.pSampledImage = &img_info;
            }
            if (_SG_VK_MAX_DESCRIPTOR_DATA_SIZE < view->vk.descriptor_size) {
                _SG_ERROR(VULKAN_VIEW_MAX_DESCRIPTOR_SIZE);
                return SG_RESOURCESTATE_FAILED;
            }
            _sg.vk.ext.get_descriptor(_sg.vk.dev, &get_info, view->vk.descriptor_size, &view->vk.descriptor_data);
        }
    }
    return SG_RESOURCESTATE_VALID;
}
_SOKOL_PRIVATE void _sg_vk_discard_view(_sg_view_t* view) {
    SOKOL_ASSERT(view);
    if (view->vk.img_view) {
        _sg_vk_delete_queue_add(_sg_vk_image_view_destructor, (void*)view->vk.img_view);
        view->vk.img_view = 0;
    }
}
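// NOTE: a negative viewport height (with the y coordinate moved to the
// opposite edge) flips the Vulkan clip-space y-axis so that all backends
// share the same coordinate convention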
_SOKOL_PRIVATE void _sg_vk_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    _SG_STRUCT(VkViewport, vp);
    vp.x = (float) x;
    vp.width = (float) w;
    vp.height = (float) -h;
    vp.maxDepth = 1.0f;
    if (origin_top_left) {
        vp.y = (float)(y + h);
    } else {
        vp.y = (float)(_sg.cur_pass.dim.height - y);
    }
    vkCmdSetViewport(_sg.vk.frame.cmd_buf, 0, 1, &vp);
}
_SOKOL_PRIVATE void _sg_vk_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    const _sg_recti_t clip = _sg_clipi(x, y, w, h, _sg.cur_pass.dim.width, _sg.cur_pass.dim.height);
    _SG_STRUCT(VkRect2D, rect);
    rect.offset.x = clip.x;
    rect.offset.y = (origin_top_left ? clip.y : (_sg.cur_pass.dim.height - (clip.y + clip.h)));
    rect.extent.width = (uint32_t) clip.w;
    rect.extent.height = (uint32_t) clip.h;
    vkCmdSetScissor(_sg.vk.frame.cmd_buf, 0, 1, &rect);
}
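// helpers to populate VkRenderingAttachmentInfo structs for dynamic
// rendering from the pass-action load/store/clear values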
_SOKOL_PRIVATE void _sg_vk_init_color_attachment_info(VkRenderingAttachmentInfo* info, const sg_color_attachment_action* action, VkImageView color_view, VkImageView resolve_view) {
    info->sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO;
    info->imageView = color_view;
    info->imageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    if (resolve_view) {
        info->resolveMode = VK_RESOLVE_MODE_AVERAGE_BIT;
        info->resolveImageView = resolve_view;
        info->resolveImageLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    } else {
        info->resolveMode = VK_RESOLVE_MODE_NONE;
        info->resolveImageView = 0;
        info->resolveImageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
    }
    info->loadOp = _sg_vk_load_op(action->load_action);
    info->storeOp = _sg_vk_store_op(action->store_action);
    info->clearValue.color.float32[0] = action->clear_value.r;
    info->clearValue.color.float32[1] = action->clear_value.g;
    info->clearValue.color.float32[2] = action->clear_value.b;
    info->clearValue.color.float32[3] = action->clear_value.a;
}
_SOKOL_PRIVATE void _sg_vk_init_depth_attachment_info(VkRenderingAttachmentInfo* info, const sg_depth_attachment_action* action, VkImageView ds_view) {
    info->sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO;
    info->imageView = ds_view;
    info->imageLayout = VK_IMAGE_LAYOUT_DEPTH_ATTACHMENT_OPTIMAL;
    info->resolveMode = VK_RESOLVE_MODE_NONE;
    info->loadOp = _sg_vk_load_op(action->load_action);
    info->storeOp = _sg_vk_store_op(action->store_action);
    info->clearValue.depthStencil.depth = action->clear_value;
}
_SOKOL_PRIVATE void _sg_vk_init_stencil_attachment_info(VkRenderingAttachmentInfo* info, const sg_stencil_attachment_action* action, VkImageView ds_view) {
    info->sType = VK_STRUCTURE_TYPE_RENDERING_ATTACHMENT_INFO;
    info->imageView = ds_view;
    info->imageLayout = VK_IMAGE_LAYOUT_STENCIL_ATTACHMENT_OPTIMAL;
    info->resolveMode = VK_RESOLVE_MODE_NONE;
    info->loadOp = _sg_vk_load_op(action->load_action);
    info->storeOp = _sg_vk_store_op(action->store_action);
    info->clearValue.depthStencil.stencil = action->clear_value;
}
_SOKOL_PRIVATE void _sg_vk_begin_compute_pass(VkCommandBuffer cmd_buf, const sg_pass* pass) {
    // FIXME: nothing to do here?
    _SOKOL_UNUSED(cmd_buf);
    _SOKOL_UNUSED(pass);
}
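// starts a render pass via dynamic rendering: swapchain passes take their
// image views and semaphores from the sg_pass swapchain struct, offscreen
// passes from the attachment view objects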
_SOKOL_PRIVATE void _sg_vk_begin_render_pass(VkCommandBuffer cmd_buf, const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    const sg_pass_action* action = &pass->action;
    const bool is_swapchain_pass = atts->empty;
    _SG_STRUCT(VkRenderingAttachmentInfo, color_att_infos[SG_MAX_COLOR_ATTACHMENTS]);
    _SG_STRUCT(VkRenderingAttachmentInfo, depth_att_info);
    _SG_STRUCT(VkRenderingAttachmentInfo, stencil_att_info);
    _SG_STRUCT(VkRenderingInfo, render_info);
    render_info.sType = VK_STRUCTURE_TYPE_RENDERING_INFO;
    render_info.renderArea.extent.width = (uint32_t)_sg.cur_pass.dim.width;
    render_info.renderArea.extent.height = (uint32_t)_sg.cur_pass.dim.height;
    render_info.layerCount = 1;
    if (is_swapchain_pass) {
        _sg.vk.swapchain = pass->swapchain.vulkan;
        SOKOL_ASSERT(_sg.vk.swapchain.render_view);
        if (pass->swapchain.sample_count > 1) {
            SOKOL_ASSERT(_sg.vk.swapchain.resolve_view);
        }
        SOKOL_ASSERT(_sg.vk.swapchain.present_complete_semaphore);
        SOKOL_ASSERT(_sg.vk.swapchain.render_finished_semaphore);
        // FIXME: need to support multiple present_complete_semaphores
        SOKOL_ASSERT(0 == _sg.vk.present_complete_sem);
        _sg.vk.present_complete_sem = (VkSemaphore)_sg.vk.swapchain.present_complete_semaphore;
        if (0 == _sg.vk.render_finished_sem) {
            _sg.vk.render_finished_sem = (VkSemaphore)_sg.vk.swapchain.render_finished_semaphore;
        } else {
            SOKOL_ASSERT(_sg.vk.render_finished_sem == (VkSemaphore)_sg.vk.swapchain.render_finished_semaphore);
        }
        VkImageView vk_color_view = (VkImageView)_sg.vk.swapchain.render_view;
        VkImageView vk_resolve_view = (VkImageView)_sg.vk.swapchain.resolve_view;
        _sg_vk_init_color_attachment_info(&color_att_infos[0], &action->colors[0], vk_color_view, vk_resolve_view);
        render_info.colorAttachmentCount = 1;
        render_info.pColorAttachments = color_att_infos;
        if (_sg.vk.swapchain.depth_stencil_view) {
            VkImageView vk_ds_view = (VkImageView)_sg.vk.swapchain.depth_stencil_view;
            const bool has_stencil = _sg_is_depth_stencil_format(pass->swapchain.depth_format);
            _sg_vk_init_depth_attachment_info(&depth_att_info, &action->depth, vk_ds_view);
            render_info.pDepthAttachment = &depth_att_info;
            if (has_stencil) {
                _sg_vk_init_stencil_attachment_info(&stencil_att_info, &action->stencil, vk_ds_view);
                render_info.pStencilAttachment = &stencil_att_info;
            }
        }
    } else {
        SOKOL_ASSERT(atts->num_color_views <= SG_MAX_COLOR_ATTACHMENTS);
        for (int i = 0; i < atts->num_color_views; i++) {
            SOKOL_ASSERT(atts->color_views[i]);
            const _sg_view_t* color_view = atts->color_views[i];
            VkImageView vk_color_view = color_view->vk.img_view;
            const _sg_view_t* resolve_view = atts->resolve_views[i];
            VkImageView vk_resolve_view = 0;
            if (resolve_view) {
                vk_resolve_view = resolve_view->vk.img_view;
            }
            _sg_vk_init_color_attachment_info(&color_att_infos[i], &action->colors[i], vk_color_view, vk_resolve_view);
        }
        if (atts->num_color_views > 0) {
            render_info.colorAttachmentCount = (uint32_t)atts->num_color_views;
            render_info.pColorAttachments = color_att_infos;
        }
        if (atts->ds_view) {
            const _sg_view_t* ds_view = atts->ds_view;
            const _sg_image_t* ds_image = _sg_image_ref_ptr(&ds_view->cmn.img.ref);
            const bool has_stencil = _sg_is_depth_stencil_format(ds_image->cmn.pixel_format);
            VkImageView vk_ds_view = ds_view->vk.img_view;
            _sg_vk_init_depth_attachment_info(&depth_att_info, &action->depth, vk_ds_view);
            render_info.pDepthAttachment = &depth_att_info;
            if (has_stencil) {
                _sg_vk_init_stencil_attachment_info(&stencil_att_info, &action->stencil, vk_ds_view);
                render_info.pStencilAttachment = &stencil_att_info;
            }
        }
    }
    vkCmdBeginRendering(cmd_buf, &render_info);
    // apply a default viewport and scissor rect covering the whole pass area
    // (note the y-flipped viewport, see _sg_vk_apply_viewport())
    _SG_STRUCT(VkViewport, vp);
    vp.y = (float)_sg.cur_pass.dim.height;
    vp.width = (float)_sg.cur_pass.dim.width;
    vp.height = (float)-_sg.cur_pass.dim.height;
    vp.maxDepth = 1.0f;
    vkCmdSetViewport(cmd_buf, 0, 1, &vp);
    _SG_STRUCT(VkRect2D, rect);
    rect.extent.width = (uint32_t)_sg.cur_pass.dim.width;
    rect.extent.height = (uint32_t)_sg.cur_pass.dim.height;
    vkCmdSetScissor(cmd_buf, 0, 1, &rect);
}
_SOKOL_PRIVATE void _sg_vk_begin_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    SOKOL_ASSERT(pass && atts);
    _sg_vk_acquire_frame_command_buffers();
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    _sg_vk_barrier_on_begin_pass(_sg.vk.frame.cmd_buf, pass, atts, _sg.cur_pass.is_compute);
    if (_sg.cur_pass.is_compute) {
        _sg_vk_begin_compute_pass(_sg.vk.frame.cmd_buf, pass);
    } else {
        _sg_vk_begin_render_pass(_sg.vk.frame.cmd_buf, pass, atts);
    }
}
  19622. _SOKOL_PRIVATE void _sg_vk_end_pass(const _sg_attachments_ptrs_t* atts) {
  19623. SOKOL_ASSERT(atts);
  19624. SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
  19625. if (!_sg.cur_pass.is_compute) {
  19626. vkCmdEndRendering(_sg.vk.frame.cmd_buf);
  19627. }
  19628. _sg_vk_barrier_on_end_pass(_sg.vk.frame.cmd_buf, atts, _sg.cur_pass.is_compute);
  19629. _sg_clear(&_sg.vk.swapchain, sizeof(_sg.vk.swapchain));
  19630. }
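// submit the frame's command buffers and forget the swapchain semaphores,
// since those must only be waited on / signaled once per frame (they are
// provided again with the next swapchain pass)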
_SOKOL_PRIVATE void _sg_vk_commit(void) {
    SOKOL_ASSERT(_sg.vk.queue);
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    _sg_vk_submit_frame_command_buffers();
    _sg.vk.present_complete_sem = 0;
    _sg.vk.render_finished_sem = 0;
}
_SOKOL_PRIVATE void _sg_vk_apply_pipeline(_sg_pipeline_t* pip) {
    SOKOL_ASSERT(pip);
    SOKOL_ASSERT(pip->vk.pip);
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    _sg.vk.uniforms_dirty = false;
    VkPipelineBindPoint bindpoint = pip->cmn.is_compute
        ? VK_PIPELINE_BIND_POINT_COMPUTE
        : VK_PIPELINE_BIND_POINT_GRAPHICS;
    vkCmdBindPipeline(_sg.vk.frame.cmd_buf, bindpoint, pip->vk.pip);
}
_SOKOL_PRIVATE bool _sg_vk_apply_bindings(_sg_bindings_ptrs_t* bnd) {
    SOKOL_ASSERT(bnd && bnd->pip);
    SOKOL_ASSERT(_sg.vk.dev);
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    VkCommandBuffer cmd_buf = _sg.vk.frame.cmd_buf;
    // track or insert pipeline barriers
    _sg_vk_barrier_on_apply_bindings(cmd_buf, bnd, _sg.cur_pass.is_compute);
    if (!_sg.cur_pass.is_compute) {
        // bind vertex buffers
        // FIXME: could do this in a single call if buffer bindings are guaranteed
        // to be continuous (currently that's not checked anywhere), or alternatively
        // via the nullDescriptor robustness feature (which apparently may have performance downsides)
        for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
            if (bnd->vbs[i]) {
                VkBuffer vk_buf = bnd->vbs[i]->vk.buf;
                VkDeviceSize vk_offset = (VkDeviceSize)bnd->vb_offsets[i];
                vkCmdBindVertexBuffers(cmd_buf, (uint32_t)i, 1, &vk_buf, &vk_offset);
            }
        }
        if (bnd->ib) {
            VkBuffer vk_buf = bnd->ib->vk.buf;
            VkDeviceSize vk_offset = (VkDeviceSize)bnd->ib_offset;
            VkIndexType vk_index_type = _sg_vk_index_type(bnd->pip->cmn.index_type);
            vkCmdBindIndexBuffer(cmd_buf, vk_buf, vk_offset, vk_index_type);
        }
    }
    // bind views and samplers
    const VkPipelineBindPoint pip_bind_point = _sg.cur_pass.is_compute
        ? VK_PIPELINE_BIND_POINT_COMPUTE
        : VK_PIPELINE_BIND_POINT_GRAPHICS;
    return _sg_vk_bind_view_smp_descriptor_set(cmd_buf, bnd, pip_bind_point);
}
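// uniform data is appended to the per-frame uniform buffer, and the resulting
// buffer-device-address range is recorded per uniform-block bind slot; the
// actual descriptor update is deferred until the next draw/dispatch
// (see uniforms_dirty)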
_SOKOL_PRIVATE void _sg_vk_apply_uniforms(int ub_slot, const sg_range* data) {
    SOKOL_ASSERT(_sg.vk.uniform.cur_dev_addr);
    SOKOL_ASSERT(data && data->ptr && (data->size > 0));
    SOKOL_ASSERT((ub_slot >= 0) && (ub_slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS));
    // copy data into uniform buffer and keep track of uniform bind infos
    const VkDeviceSize ubuf_offset = _sg_vk_uniform_copy(data);
    if (_sg.vk.uniform.overflown) {
        _SG_ERROR(VULKAN_UNIFORM_BUFFER_OVERFLOW);
        _sg.next_draw_valid = false;
        return;
    }
    _sg.vk.uniform_bindinfos[ub_slot].addr_info.range = data->size;
    _sg.vk.uniform_bindinfos[ub_slot].addr_info.address = _sg.vk.uniform.cur_dev_addr + ubuf_offset;
    _sg.vk.uniforms_dirty = true;
}
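// NOTE how the sokol-gfx draw params map to vkCmdDrawIndexed():
//  - num_elements  => indexCount
//  - num_instances => instanceCount
//  - base_element  => firstIndex
//  - base_vertex   => vertexOffset
//  - base_instance => firstInstance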
_SOKOL_PRIVATE void _sg_vk_draw(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    VkCommandBuffer cmd_buf = _sg.vk.frame.cmd_buf;
    if (_sg.vk.uniforms_dirty) {
        if (!_sg_vk_bind_uniform_descriptor_set(cmd_buf)) {
            return;
        }
    }
    if (_sg.use_indexed_draw) {
        vkCmdDrawIndexed(cmd_buf,
            (uint32_t)num_elements,
            (uint32_t)num_instances,
            (uint32_t)base_element,
            base_vertex,
            (uint32_t)base_instance);
    } else {
        vkCmdDraw(cmd_buf,
            (uint32_t)num_elements,
            (uint32_t)num_instances,
            (uint32_t)base_element,
            (uint32_t)base_instance);
    }
}
_SOKOL_PRIVATE void _sg_vk_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    SOKOL_ASSERT(_sg.vk.frame.cmd_buf);
    VkCommandBuffer cmd_buf = _sg.vk.frame.cmd_buf;
    if (_sg.vk.uniforms_dirty) {
        if (!_sg_vk_bind_uniform_descriptor_set(cmd_buf)) {
            return;
        }
    }
    vkCmdDispatch(cmd_buf, (uint32_t)num_groups_x, (uint32_t)num_groups_y, (uint32_t)num_groups_z);
}
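// stream-updated resources are written via the per-frame streaming staging
// path (which requires a valid frame command buffer, hence the acquire call),
// all other updates go through the regular staging-copy path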
_SOKOL_PRIVATE void _sg_vk_update_buffer(_sg_buffer_t* buf, const sg_range* data) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    if (buf->cmn.usage.stream_update) {
        _sg_vk_acquire_frame_command_buffers();
        _sg_vk_staging_stream_buffer_data(buf, data, 0);
    } else {
        _sg_vk_staging_copy_buffer_data(buf, data, 0, true);
    }
}
_SOKOL_PRIVATE void _sg_vk_append_buffer(_sg_buffer_t* buf, const sg_range* data, bool new_frame) {
    SOKOL_ASSERT(buf && data && data->ptr && (data->size > 0));
    _SOKOL_UNUSED(new_frame);
    if (buf->cmn.usage.stream_update) {
        _sg_vk_acquire_frame_command_buffers();
        _sg_vk_staging_stream_buffer_data(buf, data, (size_t)buf->cmn.append_pos);
    } else {
        _sg_vk_staging_copy_buffer_data(buf, data, (size_t)buf->cmn.append_pos, true);
    }
}
_SOKOL_PRIVATE void _sg_vk_update_image(_sg_image_t* img, const sg_image_data* data) {
    SOKOL_ASSERT(img && data);
    if (img->cmn.usage.stream_update) {
        _sg_vk_acquire_frame_command_buffers();
        _sg_vk_staging_stream_image_data(img, data);
    } else {
        _sg_vk_staging_copy_image_data(img, data, true);
    }
}
#endif
// ██████ ███████ ███ ██ ███████ ██████ ██ ██████ ██████ █████ ██████ ██ ██ ███████ ███ ██ ██████
// ██ ██ ████ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██ ██ ██
// ██ ███ █████ ██ ██ ██ █████ ██████ ██ ██ ██████ ███████ ██ █████ █████ ██ ██ ██ ██ ██
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ██████ ███████ ██ ████ ███████ ██ ██ ██ ██████ ██████ ██ ██ ██████ ██ ██ ███████ ██ ████ ██████
//
// >>generic backend
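// NOTE: the generic backend wrappers dispatch to exactly one backend
// implementation selected at compile time, so the preprocessor branches
// resolve to direct calls without any runtime dispatch overhead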
static inline void _sg_setup_backend(const sg_desc* desc) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_setup_backend(desc);
    #elif defined(SOKOL_METAL)
    _sg_mtl_setup_backend(desc);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_setup_backend(desc);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_setup_backend(desc);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_setup_backend(desc);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_setup_backend(desc);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_discard_backend(void) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_discard_backend();
    #elif defined(SOKOL_METAL)
    _sg_mtl_discard_backend();
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_discard_backend();
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_discard_backend();
    #elif defined(SOKOL_VULKAN)
    _sg_vk_discard_backend();
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_discard_backend();
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_reset_state_cache(void) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_reset_state_cache();
    #elif defined(SOKOL_METAL)
    _sg_mtl_reset_state_cache();
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_reset_state_cache();
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_reset_state_cache();
    #elif defined(SOKOL_VULKAN)
    _sg_vk_reset_state_cache();
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_reset_state_cache();
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline sg_resource_state _sg_create_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
    #if defined(_SOKOL_ANY_GL)
    return _sg_gl_create_buffer(buf, desc);
    #elif defined(SOKOL_METAL)
    return _sg_mtl_create_buffer(buf, desc);
    #elif defined(SOKOL_D3D11)
    return _sg_d3d11_create_buffer(buf, desc);
    #elif defined(SOKOL_WGPU)
    return _sg_wgpu_create_buffer(buf, desc);
    #elif defined(SOKOL_VULKAN)
    return _sg_vk_create_buffer(buf, desc);
    #elif defined(SOKOL_DUMMY_BACKEND)
    return _sg_dummy_create_buffer(buf, desc);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_discard_buffer(_sg_buffer_t* buf) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_discard_buffer(buf);
    #elif defined(SOKOL_METAL)
    _sg_mtl_discard_buffer(buf);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_discard_buffer(buf);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_discard_buffer(buf);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_discard_buffer(buf);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_discard_buffer(buf);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline sg_resource_state _sg_create_image(_sg_image_t* img, const sg_image_desc* desc) {
    #if defined(_SOKOL_ANY_GL)
    return _sg_gl_create_image(img, desc);
    #elif defined(SOKOL_METAL)
    return _sg_mtl_create_image(img, desc);
    #elif defined(SOKOL_D3D11)
    return _sg_d3d11_create_image(img, desc);
    #elif defined(SOKOL_WGPU)
    return _sg_wgpu_create_image(img, desc);
    #elif defined(SOKOL_VULKAN)
    return _sg_vk_create_image(img, desc);
    #elif defined(SOKOL_DUMMY_BACKEND)
    return _sg_dummy_create_image(img, desc);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_discard_image(_sg_image_t* img) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_discard_image(img);
    #elif defined(SOKOL_METAL)
    _sg_mtl_discard_image(img);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_discard_image(img);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_discard_image(img);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_discard_image(img);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_discard_image(img);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline sg_resource_state _sg_create_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
    #if defined(_SOKOL_ANY_GL)
    return _sg_gl_create_sampler(smp, desc);
    #elif defined(SOKOL_METAL)
    return _sg_mtl_create_sampler(smp, desc);
    #elif defined(SOKOL_D3D11)
    return _sg_d3d11_create_sampler(smp, desc);
    #elif defined(SOKOL_WGPU)
    return _sg_wgpu_create_sampler(smp, desc);
    #elif defined(SOKOL_VULKAN)
    return _sg_vk_create_sampler(smp, desc);
    #elif defined(SOKOL_DUMMY_BACKEND)
    return _sg_dummy_create_sampler(smp, desc);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_discard_sampler(_sg_sampler_t* smp) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_discard_sampler(smp);
    #elif defined(SOKOL_METAL)
    _sg_mtl_discard_sampler(smp);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_discard_sampler(smp);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_discard_sampler(smp);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_discard_sampler(smp);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_discard_sampler(smp);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline sg_resource_state _sg_create_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
    #if defined(_SOKOL_ANY_GL)
    return _sg_gl_create_shader(shd, desc);
    #elif defined(SOKOL_METAL)
    return _sg_mtl_create_shader(shd, desc);
    #elif defined(SOKOL_D3D11)
    return _sg_d3d11_create_shader(shd, desc);
    #elif defined(SOKOL_WGPU)
    return _sg_wgpu_create_shader(shd, desc);
    #elif defined(SOKOL_VULKAN)
    return _sg_vk_create_shader(shd, desc);
    #elif defined(SOKOL_DUMMY_BACKEND)
    return _sg_dummy_create_shader(shd, desc);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_discard_shader(_sg_shader_t* shd) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_discard_shader(shd);
    #elif defined(SOKOL_METAL)
    _sg_mtl_discard_shader(shd);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_discard_shader(shd);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_discard_shader(shd);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_discard_shader(shd);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_discard_shader(shd);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline sg_resource_state _sg_create_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
    #if defined(_SOKOL_ANY_GL)
    return _sg_gl_create_pipeline(pip, desc);
    #elif defined(SOKOL_METAL)
    return _sg_mtl_create_pipeline(pip, desc);
    #elif defined(SOKOL_D3D11)
    return _sg_d3d11_create_pipeline(pip, desc);
    #elif defined(SOKOL_WGPU)
    return _sg_wgpu_create_pipeline(pip, desc);
    #elif defined(SOKOL_VULKAN)
    return _sg_vk_create_pipeline(pip, desc);
    #elif defined(SOKOL_DUMMY_BACKEND)
    return _sg_dummy_create_pipeline(pip, desc);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_discard_pipeline(_sg_pipeline_t* pip) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_discard_pipeline(pip);
    #elif defined(SOKOL_METAL)
    _sg_mtl_discard_pipeline(pip);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_discard_pipeline(pip);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_discard_pipeline(pip);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_discard_pipeline(pip);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_discard_pipeline(pip);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline sg_resource_state _sg_create_view(_sg_view_t* view, const sg_view_desc* desc) {
    #if defined(_SOKOL_ANY_GL)
    return _sg_gl_create_view(view, desc);
    #elif defined(SOKOL_METAL)
    return _sg_mtl_create_view(view, desc);
    #elif defined(SOKOL_D3D11)
    return _sg_d3d11_create_view(view, desc);
    #elif defined(SOKOL_WGPU)
    return _sg_wgpu_create_view(view, desc);
    #elif defined(SOKOL_VULKAN)
    return _sg_vk_create_view(view, desc);
    #elif defined(SOKOL_DUMMY_BACKEND)
    return _sg_dummy_create_view(view, desc);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_discard_view(_sg_view_t* view) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_discard_view(view);
    #elif defined(SOKOL_METAL)
    _sg_mtl_discard_view(view);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_discard_view(view);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_discard_view(view);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_discard_view(view);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_discard_view(view);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_begin_pass(const sg_pass* pass, const _sg_attachments_ptrs_t* atts) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_begin_pass(pass, atts);
    #elif defined(SOKOL_METAL)
    _sg_mtl_begin_pass(pass, atts);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_begin_pass(pass, atts);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_begin_pass(pass, atts);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_begin_pass(pass, atts);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_begin_pass(pass, atts);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_end_pass(const _sg_attachments_ptrs_t* atts) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_end_pass(atts);
    #elif defined(SOKOL_METAL)
    _sg_mtl_end_pass(atts);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_end_pass(atts);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_end_pass(atts);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_end_pass(atts);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_end_pass(atts);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_apply_viewport(int x, int y, int w, int h, bool origin_top_left) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_apply_viewport(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_METAL)
    _sg_mtl_apply_viewport(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_apply_viewport(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_apply_viewport(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_apply_viewport(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_apply_viewport(x, y, w, h, origin_top_left);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_apply_scissor_rect(int x, int y, int w, int h, bool origin_top_left) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_apply_scissor_rect(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_METAL)
    _sg_mtl_apply_scissor_rect(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_apply_scissor_rect(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_apply_scissor_rect(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_apply_scissor_rect(x, y, w, h, origin_top_left);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_apply_scissor_rect(x, y, w, h, origin_top_left);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_apply_pipeline(_sg_pipeline_t* pip) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_apply_pipeline(pip);
    #elif defined(SOKOL_METAL)
    _sg_mtl_apply_pipeline(pip);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_apply_pipeline(pip);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_apply_pipeline(pip);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_apply_pipeline(pip);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_apply_pipeline(pip);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline bool _sg_apply_bindings(_sg_bindings_ptrs_t* bnd) {
    #if defined(_SOKOL_ANY_GL)
    return _sg_gl_apply_bindings(bnd);
    #elif defined(SOKOL_METAL)
    return _sg_mtl_apply_bindings(bnd);
    #elif defined(SOKOL_D3D11)
    return _sg_d3d11_apply_bindings(bnd);
    #elif defined(SOKOL_WGPU)
    return _sg_wgpu_apply_bindings(bnd);
    #elif defined(SOKOL_VULKAN)
    return _sg_vk_apply_bindings(bnd);
    #elif defined(SOKOL_DUMMY_BACKEND)
    return _sg_dummy_apply_bindings(bnd);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_apply_uniforms(int ub_slot, const sg_range* data) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_apply_uniforms(ub_slot, data);
    #elif defined(SOKOL_METAL)
    _sg_mtl_apply_uniforms(ub_slot, data);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_apply_uniforms(ub_slot, data);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_apply_uniforms(ub_slot, data);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_apply_uniforms(ub_slot, data);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_apply_uniforms(ub_slot, data);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_draw(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_draw(base_element, num_elements, num_instances, base_vertex, base_instance);
    #elif defined(SOKOL_METAL)
    _sg_mtl_draw(base_element, num_elements, num_instances, base_vertex, base_instance);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_draw(base_element, num_elements, num_instances, base_vertex, base_instance);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_draw(base_element, num_elements, num_instances, base_vertex, base_instance);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_draw(base_element, num_elements, num_instances, base_vertex, base_instance);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_draw(base_element, num_elements, num_instances, base_vertex, base_instance);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_dispatch(num_groups_x, num_groups_y, num_groups_z);
    #elif defined(SOKOL_METAL)
    _sg_mtl_dispatch(num_groups_x, num_groups_y, num_groups_z);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_dispatch(num_groups_x, num_groups_y, num_groups_z);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_dispatch(num_groups_x, num_groups_y, num_groups_z);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_dispatch(num_groups_x, num_groups_y, num_groups_z);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_dispatch(num_groups_x, num_groups_y, num_groups_z);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_commit(void) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_commit();
    #elif defined(SOKOL_METAL)
    _sg_mtl_commit();
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_commit();
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_commit();
    #elif defined(SOKOL_VULKAN)
    _sg_vk_commit();
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_commit();
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_update_buffer(_sg_buffer_t* buf, const sg_range* data) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_update_buffer(buf, data);
    #elif defined(SOKOL_METAL)
    _sg_mtl_update_buffer(buf, data);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_update_buffer(buf, data);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_update_buffer(buf, data);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_update_buffer(buf, data);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_update_buffer(buf, data);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_append_buffer(_sg_buffer_t* buf, const sg_range* data, bool new_frame) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_append_buffer(buf, data, new_frame);
    #elif defined(SOKOL_METAL)
    _sg_mtl_append_buffer(buf, data, new_frame);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_append_buffer(buf, data, new_frame);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_append_buffer(buf, data, new_frame);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_append_buffer(buf, data, new_frame);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_append_buffer(buf, data, new_frame);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_update_image(_sg_image_t* img, const sg_image_data* data) {
    #if defined(_SOKOL_ANY_GL)
    _sg_gl_update_image(img, data);
    #elif defined(SOKOL_METAL)
    _sg_mtl_update_image(img, data);
    #elif defined(SOKOL_D3D11)
    _sg_d3d11_update_image(img, data);
    #elif defined(SOKOL_WGPU)
    _sg_wgpu_update_image(img, data);
    #elif defined(SOKOL_VULKAN)
    _sg_vk_update_image(img, data);
    #elif defined(SOKOL_DUMMY_BACKEND)
    _sg_dummy_update_image(img, data);
    #else
    #error("INVALID BACKEND");
    #endif
}
static inline void _sg_push_debug_group(const char* name) {
    #if defined(SOKOL_METAL)
    _sg_mtl_push_debug_group(name);
    #else
    _SOKOL_UNUSED(name);
    #endif
}
static inline void _sg_pop_debug_group(void) {
    #if defined(SOKOL_METAL)
    _sg_mtl_pop_debug_group();
    #endif
}
// ██ ██ █████ ██ ██ ██████ █████ ████████ ██ ██████ ███ ██
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ████ ██
// ██ ██ ███████ ██ ██ ██ ██ ███████ ██ ██ ██ ██ ██ ██ ██
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ████ ██ ██ ███████ ██ ██████ ██ ██ ██ ██ ██████ ██ ████
//
// >>validation
#if defined(SOKOL_DEBUG)
_SOKOL_PRIVATE void _sg_validate_begin(void) {
    _sg.validate_error = SG_LOGITEM_OK;
}
_SOKOL_PRIVATE bool _sg_validate_end(void) {
    if (_sg.validate_error != SG_LOGITEM_OK) {
        #if !defined(SOKOL_VALIDATE_NON_FATAL)
        _SG_PANIC(VALIDATION_FAILED);
        return false;
        #else
        return false;
        #endif
    } else {
        return true;
    }
}
#endif
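// helper to check that exactly one of three flags is set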
_SOKOL_PRIVATE bool _sg_one(bool b0, bool b1, bool b2) {
    return (b0 && !b1 && !b2) || (!b0 && b1 && !b2) || (!b0 && !b1 && b2);
}
_SOKOL_PRIVATE bool _sg_validate_buffer_desc(const sg_buffer_desc* desc) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(desc);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(desc);
        _sg_validate_begin();
        _SG_VALIDATE(desc->_start_canary == 0, VALIDATE_BUFFERDESC_CANARY);
        _SG_VALIDATE(desc->_end_canary == 0, VALIDATE_BUFFERDESC_CANARY);
        _SG_VALIDATE(desc->size > 0, VALIDATE_BUFFERDESC_EXPECT_NONZERO_SIZE);
        _SG_VALIDATE(_sg_one(desc->usage.immutable, desc->usage.dynamic_update, desc->usage.stream_update), VALIDATE_BUFFERDESC_IMMUTABLE_DYNAMIC_STREAM);
        if (_sg.features.separate_buffer_types) {
            _SG_VALIDATE(_sg_one(desc->usage.vertex_buffer, desc->usage.index_buffer, desc->usage.storage_buffer), VALIDATE_BUFFERDESC_SEPARATE_BUFFER_TYPES);
        }
        bool injected = (0 != desc->gl_buffers[0]) ||
                        (0 != desc->mtl_buffers[0]) ||
                        (0 != desc->d3d11_buffer) ||
                        (0 != desc->wgpu_buffer);
        if (!injected && desc->usage.immutable) {
            if (desc->data.ptr) {
                _SG_VALIDATE(desc->size == desc->data.size, VALIDATE_BUFFERDESC_EXPECT_MATCHING_DATA_SIZE);
            } else {
                _SG_VALIDATE(desc->usage.storage_buffer, VALIDATE_BUFFERDESC_EXPECT_DATA);
                _SG_VALIDATE(desc->data.size == 0, VALIDATE_BUFFERDESC_EXPECT_ZERO_DATA_SIZE);
            }
        } else {
            _SG_VALIDATE(0 == desc->data.ptr, VALIDATE_BUFFERDESC_EXPECT_NO_DATA);
            _SG_VALIDATE(desc->data.size == 0, VALIDATE_BUFFERDESC_EXPECT_ZERO_DATA_SIZE);
        }
        if (desc->usage.storage_buffer) {
            _SG_VALIDATE(_sg.features.compute, VALIDATE_BUFFERDESC_STORAGEBUFFER_SUPPORTED);
            _SG_VALIDATE(_sg_multiple_u64(desc->size, 4), VALIDATE_BUFFERDESC_STORAGEBUFFER_SIZE_MULTIPLE_4);
        }
        return _sg_validate_end();
    #endif
}
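// checks that each mip level in an sg_image_data provides exactly the expected
// number of bytes (the surface pitch for the mip level's dimensions, times the
// number of slices)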
_SOKOL_PRIVATE void _sg_validate_image_data(const sg_image_data* data, sg_pixel_format fmt, int width, int height, int num_mips, int num_slices) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(data);
        _SOKOL_UNUSED(fmt);
        _SOKOL_UNUSED(width);
        _SOKOL_UNUSED(height);
        _SOKOL_UNUSED(num_mips);
        _SOKOL_UNUSED(num_slices);
    #else
        for (int mip_index = 0; mip_index < num_mips; mip_index++) {
            const bool has_data = data->mip_levels[mip_index].ptr != 0;
            const bool has_size = data->mip_levels[mip_index].size > 0;
            _SG_VALIDATE(has_data && has_size, VALIDATE_IMAGEDATA_NODATA);
            const int mip_width = _sg_miplevel_dim(width, mip_index);
            const int mip_height = _sg_miplevel_dim(height, mip_index);
            const int bytes_per_slice = _sg_surface_pitch(fmt, mip_width, mip_height, 1);
            const int expected_size = bytes_per_slice * num_slices;
            _SG_VALIDATE(expected_size == (int)data->mip_levels[mip_index].size, VALIDATE_IMAGEDATA_DATA_SIZE);
        }
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_image_desc(const sg_image_desc* desc) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(desc);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(desc);
        const sg_image_usage* usg = &desc->usage;
        const bool any_attachment = usg->color_attachment || usg->resolve_attachment || usg->depth_stencil_attachment;
        _sg_validate_begin();
        _SG_VALIDATE(desc->_start_canary == 0, VALIDATE_IMAGEDESC_CANARY);
        _SG_VALIDATE(desc->_end_canary == 0, VALIDATE_IMAGEDESC_CANARY);
        _SG_VALIDATE(_sg_one(usg->immutable, usg->dynamic_update, usg->stream_update), VALIDATE_IMAGEDESC_IMMUTABLE_DYNAMIC_STREAM);
        switch (desc->type) {
            case SG_IMAGETYPE_2D:
                _SG_VALIDATE(desc->num_slices == 1, VALIDATE_IMAGEDESC_IMAGETYPE_2D_NUMSLICES);
                break;
            case SG_IMAGETYPE_CUBE:
                _SG_VALIDATE(desc->num_slices == 6, VALIDATE_IMAGEDESC_IMAGETYPE_CUBE_NUMSLICES);
                break;
            case SG_IMAGETYPE_ARRAY:
                _SG_VALIDATE((desc->num_slices >= 1) && (desc->num_slices <= _sg.limits.max_image_array_layers), VALIDATE_IMAGEDESC_IMAGETYPE_ARRAY_NUMSLICES);
                break;
            case SG_IMAGETYPE_3D:
                _SG_VALIDATE((desc->num_slices >= 1) && (desc->num_slices <= _sg.limits.max_image_size_3d), VALIDATE_IMAGEDESC_IMAGETYPE_3D_NUMSLICES);
                break;
            default:
                SOKOL_UNREACHABLE;
                break;
        }
        _SG_VALIDATE(desc->width > 0, VALIDATE_IMAGEDESC_WIDTH);
        _SG_VALIDATE(desc->height > 0, VALIDATE_IMAGEDESC_HEIGHT);
        const sg_pixel_format fmt = desc->pixel_format;
        const bool injected = (0 != desc->gl_textures[0]) ||
                              (0 != desc->mtl_textures[0]) ||
                              (0 != desc->d3d11_texture) ||
                              (0 != desc->wgpu_texture);
        if (_sg_is_depth_or_depth_stencil_format(fmt)) {
            _SG_VALIDATE(desc->type != SG_IMAGETYPE_3D, VALIDATE_IMAGEDESC_DEPTH_3D_IMAGE);
        }
        if (any_attachment || usg->storage_image) {
            SOKOL_ASSERT(((int)fmt >= 0) && ((int)fmt < _SG_PIXELFORMAT_NUM));
            _SG_VALIDATE(usg->immutable, VALIDATE_IMAGEDESC_ATTACHMENT_EXPECT_IMMUTABLE);
            _SG_VALIDATE(desc->data.mip_levels[0].ptr == 0, VALIDATE_IMAGEDESC_ATTACHMENT_EXPECT_NO_DATA);
            if (any_attachment) {
                _SG_VALIDATE(_sg.formats[fmt].render, VALIDATE_IMAGEDESC_ATTACHMENT_PIXELFORMAT);
                if (usg->resolve_attachment) {
                    _SG_VALIDATE(desc->sample_count == 1, VALIDATE_IMAGEDESC_ATTACHMENT_RESOLVE_EXPECT_NO_MSAA);
                }
                if (desc->sample_count > 1) {
                    _SG_VALIDATE(_sg.formats[fmt].msaa, VALIDATE_IMAGEDESC_ATTACHMENT_NO_MSAA_SUPPORT);
                    _SG_VALIDATE(desc->num_mipmaps == 1, VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_NUM_MIPMAPS);
                    _SG_VALIDATE(desc->type != SG_IMAGETYPE_ARRAY, VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_ARRAY_IMAGE);
                    _SG_VALIDATE(desc->type != SG_IMAGETYPE_3D, VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_3D_IMAGE);
                    _SG_VALIDATE(desc->type != SG_IMAGETYPE_CUBE, VALIDATE_IMAGEDESC_ATTACHMENT_MSAA_CUBE_IMAGE);
                }
            } else if (usg->storage_image) {
                _SG_VALIDATE(_sg_is_valid_storage_image_format(fmt), VALIDATE_IMAGEDESC_STORAGEIMAGE_PIXELFORMAT);
                // D3D11 doesn't allow multisampled UAVs (see: https://github.com/gpuweb/gpuweb/issues/513)
                _SG_VALIDATE(desc->sample_count == 1, VALIDATE_IMAGEDESC_STORAGEIMAGE_EXPECT_NO_MSAA);
            }
        } else {
            _SG_VALIDATE(desc->sample_count == 1, VALIDATE_IMAGEDESC_MSAA_BUT_NO_ATTACHMENT);
            const bool valid_nonrt_fmt = !_sg_is_valid_attachment_depth_format(fmt);
            _SG_VALIDATE(valid_nonrt_fmt, VALIDATE_IMAGEDESC_NONRT_PIXELFORMAT);
            const bool is_compressed = _sg_is_compressed_pixel_format(desc->pixel_format);
            if (is_compressed) {
                _SG_VALIDATE(usg->immutable, VALIDATE_IMAGEDESC_COMPRESSED_IMMUTABLE);
            }
            if (!injected && usg->immutable) {
                // image desc must have valid data
                _sg_validate_image_data(&desc->data,
                    desc->pixel_format,
                    desc->width,
                    desc->height,
                    desc->num_mipmaps,
                    desc->num_slices);
            } else {
                // image desc must not have data
                for (int mip_index = 0; mip_index < SG_MAX_MIPMAPS; mip_index++) {
                    const bool no_data = 0 == desc->data.mip_levels[mip_index].ptr;
                    const bool no_size = 0 == desc->data.mip_levels[mip_index].size;
                    if (injected) {
                        _SG_VALIDATE(no_data && no_size, VALIDATE_IMAGEDESC_INJECTED_NO_DATA);
                    }
                    if (!usg->immutable) {
                        _SG_VALIDATE(no_data && no_size, VALIDATE_IMAGEDESC_DYNAMIC_NO_DATA);
                    }
                }
            }
        }
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_sampler_desc(const sg_sampler_desc* desc) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(desc);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(desc);
        _sg_validate_begin();
        _SG_VALIDATE(desc->_start_canary == 0, VALIDATE_SAMPLERDESC_CANARY);
        _SG_VALIDATE(desc->_end_canary == 0, VALIDATE_SAMPLERDESC_CANARY);
        // restriction from WebGPU: when anisotropy > 1, all filters must be linear
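        // for instance an anisotropic sampler must be described like this to
        // pass validation (illustrative values):
        //
        //      sg_sampler_desc desc = {
        //          .min_filter = SG_FILTER_LINEAR,
        //          .mag_filter = SG_FILTER_LINEAR,
        //          .mipmap_filter = SG_FILTER_LINEAR,
        //          .max_anisotropy = 8,
        //      };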
        if (desc->max_anisotropy > 1) {
            _SG_VALIDATE((desc->min_filter == SG_FILTER_LINEAR)
                && (desc->mag_filter == SG_FILTER_LINEAR)
                && (desc->mipmap_filter == SG_FILTER_LINEAR),
                VALIDATE_SAMPLERDESC_ANISTROPIC_REQUIRES_LINEAR_FILTERING);
        }
        return _sg_validate_end();
    #endif
}
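// 128-bit bitmask helpers to detect backend bind-slot collisions across
// shader stages: the lo half tracks vertex-stage (or compute-stage) slots,
// the hi half fragment-stage slots, and for stage-agnostic slot spaces
// (SG_SHADERSTAGE_NONE, used for WGSL group bindings and SPIRV descriptor-set
// bindings) the full 0..127 bit range is used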
typedef struct {
    uint64_t lo, hi;
} _sg_u128_t;
_SOKOL_PRIVATE _sg_u128_t _sg_u128(void) {
    _SG_STRUCT(_sg_u128_t, res);
    return res;
}
_SOKOL_PRIVATE _sg_u128_t _sg_validate_set_slot_bit(_sg_u128_t bits, sg_shader_stage stage, uint8_t slot) {
    switch (stage) {
        case SG_SHADERSTAGE_NONE:
            SOKOL_ASSERT(slot < 128);
            if (slot < 64) {
                bits.lo |= 1ULL << slot;
            } else {
                bits.hi |= 1ULL << (slot - 64);
            }
            break;
        case SG_SHADERSTAGE_VERTEX:
            SOKOL_ASSERT(slot < 64);
            bits.lo |= 1ULL << slot;
            break;
        case SG_SHADERSTAGE_FRAGMENT:
            SOKOL_ASSERT(slot < 64);
            bits.hi |= 1ULL << slot;
            break;
        case SG_SHADERSTAGE_COMPUTE:
            SOKOL_ASSERT(slot < 64);
            bits.lo |= 1ULL << slot;
            break;
        default:
            SOKOL_UNREACHABLE;
            break;
    }
    return bits;
}
_SOKOL_PRIVATE bool _sg_validate_slot_bits(_sg_u128_t bits, sg_shader_stage stage, uint8_t slot) {
    _sg_u128_t mask = _sg_u128();
    switch (stage) {
        case SG_SHADERSTAGE_NONE:
            SOKOL_ASSERT(slot < 128);
            if (slot < 64) {
                mask.lo = 1ULL << slot;
            } else {
                mask.hi = 1ULL << (slot - 64);
            }
            break;
        case SG_SHADERSTAGE_VERTEX:
            SOKOL_ASSERT(slot < 64);
            mask.lo = 1ULL << slot;
            break;
        case SG_SHADERSTAGE_FRAGMENT:
            SOKOL_ASSERT(slot < 64);
            mask.hi = 1ULL << slot;
            break;
        case SG_SHADERSTAGE_COMPUTE:
            SOKOL_ASSERT(slot < 64);
            mask.lo = 1ULL << slot;
            break;
        default:
            SOKOL_UNREACHABLE;
            break;
    }
    return ((bits.lo & mask.lo) == 0) && ((bits.hi & mask.hi) == 0);
}
_SOKOL_PRIVATE bool _sg_validate_shader_desc(const sg_shader_desc* desc) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(desc);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(desc);
        bool is_compute_shader = (desc->compute_func.source != 0) || (desc->compute_func.bytecode.ptr != 0);
        _sg_validate_begin();
        _SG_VALIDATE(desc->_start_canary == 0, VALIDATE_SHADERDESC_CANARY);
        _SG_VALIDATE(desc->_end_canary == 0, VALIDATE_SHADERDESC_CANARY);
        #if defined(SOKOL_GLCORE) || defined(SOKOL_GLES3) || defined(SOKOL_WGPU)
        // on GL or WebGPU, must provide shader source code
        if (is_compute_shader) {
            _SG_VALIDATE(0 != desc->compute_func.source, VALIDATE_SHADERDESC_COMPUTE_SOURCE);
        } else {
            _SG_VALIDATE(0 != desc->vertex_func.source, VALIDATE_SHADERDESC_VERTEX_SOURCE);
            _SG_VALIDATE(0 != desc->fragment_func.source, VALIDATE_SHADERDESC_FRAGMENT_SOURCE);
        }
        #elif defined(SOKOL_METAL) || defined(SOKOL_D3D11)
        // on Metal or D3D11, must provide shader source code or byte code
        if (is_compute_shader) {
            _SG_VALIDATE((0 != desc->compute_func.source) || (0 != desc->compute_func.bytecode.ptr), VALIDATE_SHADERDESC_COMPUTE_SOURCE_OR_BYTECODE);
        } else {
            _SG_VALIDATE((0 != desc->vertex_func.source) || (0 != desc->vertex_func.bytecode.ptr), VALIDATE_SHADERDESC_VERTEX_SOURCE_OR_BYTECODE);
            _SG_VALIDATE((0 != desc->fragment_func.source) || (0 != desc->fragment_func.bytecode.ptr), VALIDATE_SHADERDESC_FRAGMENT_SOURCE_OR_BYTECODE);
        }
        #else
        // Dummy Backend, don't require source or bytecode
        #endif
        if (is_compute_shader) {
            _SG_VALIDATE((0 == desc->vertex_func.source) && (0 == desc->vertex_func.bytecode.ptr), VALIDATE_SHADERDESC_INVALID_SHADER_COMBO);
            _SG_VALIDATE((0 == desc->fragment_func.source) && (0 == desc->fragment_func.bytecode.ptr), VALIDATE_SHADERDESC_INVALID_SHADER_COMBO);
        } else {
            _SG_VALIDATE((0 == desc->compute_func.source) && (0 == desc->compute_func.bytecode.ptr), VALIDATE_SHADERDESC_INVALID_SHADER_COMBO);
        }
        #if defined(SOKOL_METAL)
        if (is_compute_shader) {
            int x = desc->mtl_threads_per_threadgroup.x;
            int y = desc->mtl_threads_per_threadgroup.y;
            int z = desc->mtl_threads_per_threadgroup.z;
            _SG_VALIDATE((x > 0) && (y > 0) && (z > 0), VALIDATE_SHADERDESC_METAL_THREADS_PER_THREADGROUP_INITIALIZED);
            _SG_VALIDATE(((x * y * z) & 31) == 0, VALIDATE_SHADERDESC_METAL_THREADS_PER_THREADGROUP_MULTIPLE_32);
        }
        #endif
        for (size_t i = 0; i < SG_MAX_VERTEX_ATTRIBUTES; i++) {
            if (desc->attrs[i].glsl_name) {
                _SG_VALIDATE(strlen(desc->attrs[i].glsl_name) < _SG_STRING_SIZE, VALIDATE_SHADERDESC_ATTR_STRING_TOO_LONG);
            }
            if (desc->attrs[i].hlsl_sem_name) {
                _SG_VALIDATE(strlen(desc->attrs[i].hlsl_sem_name) < _SG_STRING_SIZE, VALIDATE_SHADERDESC_ATTR_STRING_TOO_LONG);
            }
        }
        // if shader byte code is provided, the size must also be provided
        if (0 != desc->vertex_func.bytecode.ptr) {
            _SG_VALIDATE(desc->vertex_func.bytecode.size > 0, VALIDATE_SHADERDESC_NO_BYTECODE_SIZE);
        }
        if (0 != desc->fragment_func.bytecode.ptr) {
            _SG_VALIDATE(desc->fragment_func.bytecode.size > 0, VALIDATE_SHADERDESC_NO_BYTECODE_SIZE);
        }
        if (0 != desc->compute_func.bytecode.ptr) {
            _SG_VALIDATE(desc->compute_func.bytecode.size > 0, VALIDATE_SHADERDESC_NO_BYTECODE_SIZE);
        }
        #if defined(SOKOL_METAL)
        _sg_u128_t msl_buf_bits = _sg_u128();
        _sg_u128_t msl_tex_bits = _sg_u128();
        _sg_u128_t msl_smp_bits = _sg_u128();
        #elif defined(SOKOL_D3D11)
        _sg_u128_t hlsl_buf_bits = _sg_u128();
        _sg_u128_t hlsl_srv_bits = _sg_u128();
        _sg_u128_t hlsl_uav_bits = _sg_u128();
        _sg_u128_t hlsl_smp_bits = _sg_u128();
        #elif defined(_SOKOL_ANY_GL)
        _sg_u128_t glsl_sbuf_bnd_bits = _sg_u128();
        _sg_u128_t glsl_simg_bnd_bits = _sg_u128();
        #elif defined(SOKOL_WGPU)
        _sg_u128_t wgsl_group0_bits = _sg_u128();
        _sg_u128_t wgsl_group1_bits = _sg_u128();
        #elif defined(SOKOL_VULKAN)
        _sg_u128_t spirv_set0_bits = _sg_u128();
        _sg_u128_t spirv_set1_bits = _sg_u128();
        #endif
        for (size_t ub_idx = 0; ub_idx < SG_MAX_UNIFORMBLOCK_BINDSLOTS; ub_idx++) {
            const sg_shader_uniform_block* ub_desc = &desc->uniform_blocks[ub_idx];
            if (ub_desc->stage == SG_SHADERSTAGE_NONE) {
                continue;
            }
            _SG_VALIDATE(ub_desc->size > 0, VALIDATE_SHADERDESC_UNIFORMBLOCK_SIZE_IS_ZERO);
            #if defined(SOKOL_METAL)
            _SG_VALIDATE(_sg_validate_slot_bits(msl_buf_bits, ub_desc->stage, ub_desc->msl_buffer_n), VALIDATE_SHADERDESC_UNIFORMBLOCK_METAL_BUFFER_SLOT_COLLISION);
            msl_buf_bits = _sg_validate_set_slot_bit(msl_buf_bits, ub_desc->stage, ub_desc->msl_buffer_n);
            #elif defined(SOKOL_D3D11)
            _SG_VALIDATE(_sg_validate_slot_bits(hlsl_buf_bits, ub_desc->stage, ub_desc->hlsl_register_b_n), VALIDATE_SHADERDESC_UNIFORMBLOCK_HLSL_REGISTER_B_COLLISION);
            hlsl_buf_bits = _sg_validate_set_slot_bit(hlsl_buf_bits, ub_desc->stage, ub_desc->hlsl_register_b_n);
            #elif defined(SOKOL_WGPU)
            _SG_VALIDATE(_sg_validate_slot_bits(wgsl_group0_bits, SG_SHADERSTAGE_NONE, ub_desc->wgsl_group0_binding_n), VALIDATE_SHADERDESC_UNIFORMBLOCK_WGSL_GROUP0_BINDING_COLLISION);
            wgsl_group0_bits = _sg_validate_set_slot_bit(wgsl_group0_bits, SG_SHADERSTAGE_NONE, ub_desc->wgsl_group0_binding_n);
            #elif defined(SOKOL_VULKAN)
            _SG_VALIDATE(_sg_validate_slot_bits(spirv_set0_bits, SG_SHADERSTAGE_NONE, ub_desc->spirv_set0_binding_n), VALIDATE_SHADERDESC_UNIFORMBLOCK_SPIRV_SET0_BINDING_COLLISION);
            spirv_set0_bits = _sg_validate_set_slot_bit(spirv_set0_bits, SG_SHADERSTAGE_NONE, ub_desc->spirv_set0_binding_n);
            #endif
            #if defined(_SOKOL_ANY_GL)
            bool uniforms_continuous = true;
            uint32_t uniform_offset = 0;
            int num_uniforms = 0;
            for (size_t u_index = 0; u_index < SG_MAX_UNIFORMBLOCK_MEMBERS; u_index++) {
                const sg_glsl_shader_uniform* u_desc = &ub_desc->glsl_uniforms[u_index];
                if (u_desc->type != SG_UNIFORMTYPE_INVALID) {
                    _SG_VALIDATE(uniforms_continuous, VALIDATE_SHADERDESC_UNIFORMBLOCK_NO_CONT_MEMBERS);
                    _SG_VALIDATE(u_desc->glsl_name, VALIDATE_SHADERDESC_UNIFORMBLOCK_UNIFORM_GLSL_NAME);
                    const int array_count = u_desc->array_count;
                    _SG_VALIDATE(array_count > 0, VALIDATE_SHADERDESC_UNIFORMBLOCK_ARRAY_COUNT);
                    const uint32_t u_align = _sg_uniform_alignment(u_desc->type, array_count, ub_desc->layout);
                    const uint32_t u_size = _sg_uniform_size(u_desc->type, array_count, ub_desc->layout);
                    uniform_offset = _sg_align_u32(uniform_offset, u_align);
                    uniform_offset += u_size;
                    num_uniforms++;
                    // with std140, arrays are only allowed for FLOAT4, INT4, MAT4
                    if (ub_desc->layout == SG_UNIFORMLAYOUT_STD140) {
                        if (array_count > 1) {
                            _SG_VALIDATE((u_desc->type == SG_UNIFORMTYPE_FLOAT4) || (u_desc->type == SG_UNIFORMTYPE_INT4) || (u_desc->type == SG_UNIFORMTYPE_MAT4), VALIDATE_SHADERDESC_UNIFORMBLOCK_STD140_ARRAY_TYPE);
                        }
                    }
                } else {
                    uniforms_continuous = false;
                }
            }
            if (ub_desc->layout == SG_UNIFORMLAYOUT_STD140) {
                uniform_offset = _sg_align_u32(uniform_offset, 16);
            }
            _SG_VALIDATE((size_t)uniform_offset == ub_desc->size, VALIDATE_SHADERDESC_UNIFORMBLOCK_SIZE_MISMATCH);
            _SG_VALIDATE(num_uniforms > 0, VALIDATE_SHADERDESC_UNIFORMBLOCK_NO_MEMBERS);
            #endif
        }
        uint32_t texview_slot_mask = 0;
        for (size_t view_idx = 0; view_idx < SG_MAX_VIEW_BINDSLOTS; view_idx++) {
            const sg_shader_view* view_desc = &desc->views[view_idx];
            if (view_desc->texture.stage != SG_SHADERSTAGE_NONE) {
                const sg_shader_texture_view* tex_desc = &view_desc->texture;
                texview_slot_mask |= (1 << view_idx);
                #if defined(SOKOL_METAL)
                _SG_VALIDATE(_sg_validate_slot_bits(msl_tex_bits, tex_desc->stage, tex_desc->msl_texture_n), VALIDATE_SHADERDESC_VIEW_TEXTURE_METAL_TEXTURE_SLOT_COLLISION);
                msl_tex_bits = _sg_validate_set_slot_bit(msl_tex_bits, tex_desc->stage, tex_desc->msl_texture_n);
                #elif defined(SOKOL_D3D11)
                _SG_VALIDATE(_sg_validate_slot_bits(hlsl_srv_bits, tex_desc->stage, tex_desc->hlsl_register_t_n), VALIDATE_SHADERDESC_VIEW_TEXTURE_HLSL_REGISTER_T_COLLISION);
                hlsl_srv_bits = _sg_validate_set_slot_bit(hlsl_srv_bits, tex_desc->stage, tex_desc->hlsl_register_t_n);
                #elif defined(SOKOL_WGPU)
                _SG_VALIDATE(_sg_validate_slot_bits(wgsl_group1_bits, SG_SHADERSTAGE_NONE, tex_desc->wgsl_group1_binding_n), VALIDATE_SHADERDESC_VIEW_TEXTURE_WGSL_GROUP1_BINDING_COLLISION);
                wgsl_group1_bits = _sg_validate_set_slot_bit(wgsl_group1_bits, SG_SHADERSTAGE_NONE, tex_desc->wgsl_group1_binding_n);
                #elif defined(SOKOL_VULKAN)
                _SG_VALIDATE(_sg_validate_slot_bits(spirv_set1_bits, SG_SHADERSTAGE_NONE, tex_desc->spirv_set1_binding_n), VALIDATE_SHADERDESC_VIEW_TEXTURE_SPIRV_SET1_BINDING_COLLISION);
                spirv_set1_bits = _sg_validate_set_slot_bit(spirv_set1_bits, SG_SHADERSTAGE_NONE, tex_desc->spirv_set1_binding_n);
                #elif defined(SOKOL_DUMMY_BACKEND) || defined(_SOKOL_ANY_GL)
                _SOKOL_UNUSED(tex_desc);
                #endif
            } else if (view_desc->storage_buffer.stage != SG_SHADERSTAGE_NONE) {
                const sg_shader_storage_buffer_view* sbuf_desc = &view_desc->storage_buffer;
                #if defined(SOKOL_METAL)
                _SG_VALIDATE(_sg_validate_slot_bits(msl_buf_bits, sbuf_desc->stage, sbuf_desc->msl_buffer_n), VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_METAL_BUFFER_SLOT_COLLISION);
                msl_buf_bits = _sg_validate_set_slot_bit(msl_buf_bits, sbuf_desc->stage, sbuf_desc->msl_buffer_n);
                #elif defined(SOKOL_D3D11)
                if (sbuf_desc->readonly) {
                    _SG_VALIDATE(_sg_validate_slot_bits(hlsl_srv_bits, sbuf_desc->stage, sbuf_desc->hlsl_register_t_n), VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_HLSL_REGISTER_T_COLLISION);
                    hlsl_srv_bits = _sg_validate_set_slot_bit(hlsl_srv_bits, sbuf_desc->stage, sbuf_desc->hlsl_register_t_n);
                } else {
                    _SG_VALIDATE(_sg_validate_slot_bits(hlsl_uav_bits, sbuf_desc->stage, sbuf_desc->hlsl_register_u_n), VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_HLSL_REGISTER_U_COLLISION);
                    hlsl_uav_bits = _sg_validate_set_slot_bit(hlsl_uav_bits, sbuf_desc->stage, sbuf_desc->hlsl_register_u_n);
                }
                #elif defined(_SOKOL_ANY_GL)
                _SG_VALIDATE(_sg_validate_slot_bits(glsl_sbuf_bnd_bits, SG_SHADERSTAGE_NONE, sbuf_desc->glsl_binding_n), VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_GLSL_BINDING_COLLISION);
                glsl_sbuf_bnd_bits = _sg_validate_set_slot_bit(glsl_sbuf_bnd_bits, SG_SHADERSTAGE_NONE, sbuf_desc->glsl_binding_n);
                #elif defined(SOKOL_WGPU)
                _SG_VALIDATE(_sg_validate_slot_bits(wgsl_group1_bits, SG_SHADERSTAGE_NONE, sbuf_desc->wgsl_group1_binding_n), VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_WGSL_GROUP1_BINDING_COLLISION);
                wgsl_group1_bits = _sg_validate_set_slot_bit(wgsl_group1_bits, SG_SHADERSTAGE_NONE, sbuf_desc->wgsl_group1_binding_n);
                #elif defined(SOKOL_VULKAN)
                _SG_VALIDATE(_sg_validate_slot_bits(spirv_set1_bits, SG_SHADERSTAGE_NONE, sbuf_desc->spirv_set1_binding_n), VALIDATE_SHADERDESC_VIEW_STORAGEBUFFER_SPIRV_SET1_BINDING_COLLISION);
                spirv_set1_bits = _sg_validate_set_slot_bit(spirv_set1_bits, SG_SHADERSTAGE_NONE, sbuf_desc->spirv_set1_binding_n);
                #elif defined(SOKOL_DUMMY_BACKEND)
                _SOKOL_UNUSED(sbuf_desc);
                #endif
            } else if (view_desc->storage_image.stage != SG_SHADERSTAGE_NONE) {
                const sg_shader_storage_image_view* simg_desc = &view_desc->storage_image;
                _SG_VALIDATE(simg_desc->stage == SG_SHADERSTAGE_COMPUTE, VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_EXPECT_COMPUTE_STAGE);
                #if defined(SOKOL_METAL)
                _SG_VALIDATE(_sg_validate_slot_bits(msl_tex_bits, simg_desc->stage, simg_desc->msl_texture_n), VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_METAL_TEXTURE_SLOT_COLLISION);
                msl_tex_bits = _sg_validate_set_slot_bit(msl_tex_bits, simg_desc->stage, simg_desc->msl_texture_n);
                #elif defined(SOKOL_D3D11)
                _SG_VALIDATE(_sg_validate_slot_bits(hlsl_uav_bits, simg_desc->stage, simg_desc->hlsl_register_u_n), VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_HLSL_REGISTER_U_COLLISION);
                hlsl_uav_bits = _sg_validate_set_slot_bit(hlsl_uav_bits, simg_desc->stage, simg_desc->hlsl_register_u_n);
                #elif defined(_SOKOL_ANY_GL)
                _SG_VALIDATE(_sg_validate_slot_bits(glsl_simg_bnd_bits, SG_SHADERSTAGE_NONE, simg_desc->glsl_binding_n), VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_GLSL_BINDING_COLLISION);
                glsl_simg_bnd_bits = _sg_validate_set_slot_bit(glsl_simg_bnd_bits, SG_SHADERSTAGE_NONE, simg_desc->glsl_binding_n);
                #elif defined(SOKOL_WGPU)
                _SG_VALIDATE(_sg_validate_slot_bits(wgsl_group1_bits, SG_SHADERSTAGE_NONE, simg_desc->wgsl_group1_binding_n), VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_WGSL_GROUP1_BINDING_COLLISION);
                wgsl_group1_bits = _sg_validate_set_slot_bit(wgsl_group1_bits, SG_SHADERSTAGE_NONE, simg_desc->wgsl_group1_binding_n);
                #elif defined(SOKOL_VULKAN)
                _SG_VALIDATE(_sg_validate_slot_bits(spirv_set1_bits, SG_SHADERSTAGE_NONE, simg_desc->spirv_set1_binding_n), VALIDATE_SHADERDESC_VIEW_STORAGEIMAGE_SPIRV_SET1_BINDING_COLLISION);
                spirv_set1_bits = _sg_validate_set_slot_bit(spirv_set1_bits, SG_SHADERSTAGE_NONE, simg_desc->spirv_set1_binding_n);
                #endif
            }
        }
        uint32_t smp_slot_mask = 0;
        for (size_t smp_idx = 0; smp_idx < SG_MAX_SAMPLER_BINDSLOTS; smp_idx++) {
            const sg_shader_sampler* smp_desc = &desc->samplers[smp_idx];
            if (smp_desc->stage == SG_SHADERSTAGE_NONE) {
                continue;
            }
            smp_slot_mask |= (1 << smp_idx);
            #if defined(SOKOL_METAL)
            _SG_VALIDATE(_sg_validate_slot_bits(msl_smp_bits, smp_desc->stage, smp_desc->msl_sampler_n), VALIDATE_SHADERDESC_SAMPLER_METAL_SAMPLER_SLOT_COLLISION);
            msl_smp_bits = _sg_validate_set_slot_bit(msl_smp_bits, smp_desc->stage, smp_desc->msl_sampler_n);
            #elif defined(SOKOL_D3D11)
            _SG_VALIDATE(_sg_validate_slot_bits(hlsl_smp_bits, smp_desc->stage, smp_desc->hlsl_register_s_n), VALIDATE_SHADERDESC_SAMPLER_HLSL_REGISTER_S_COLLISION);
            hlsl_smp_bits = _sg_validate_set_slot_bit(hlsl_smp_bits, smp_desc->stage, smp_desc->hlsl_register_s_n);
            #elif defined(SOKOL_WGPU)
            _SG_VALIDATE(_sg_validate_slot_bits(wgsl_group1_bits, SG_SHADERSTAGE_NONE, smp_desc->wgsl_group1_binding_n), VALIDATE_SHADERDESC_SAMPLER_WGSL_GROUP1_BINDING_COLLISION);
            wgsl_group1_bits = _sg_validate_set_slot_bit(wgsl_group1_bits, SG_SHADERSTAGE_NONE, smp_desc->wgsl_group1_binding_n);
            #elif defined(SOKOL_VULKAN)
            _SG_VALIDATE(_sg_validate_slot_bits(spirv_set1_bits, SG_SHADERSTAGE_NONE, smp_desc->spirv_set1_binding_n), VALIDATE_SHADERDESC_SAMPLER_SPIRV_SET1_BINDING_COLLISION);
            spirv_set1_bits = _sg_validate_set_slot_bit(spirv_set1_bits, SG_SHADERSTAGE_NONE, smp_desc->spirv_set1_binding_n);
            #endif
        }
        uint32_t ref_texview_slot_mask = 0;
        uint32_t ref_smp_slot_mask = 0;
        for (size_t tex_smp_idx = 0; tex_smp_idx < SG_MAX_TEXTURE_SAMPLER_PAIRS; tex_smp_idx++) {
            const sg_shader_texture_sampler_pair* tex_smp_desc = &desc->texture_sampler_pairs[tex_smp_idx];
            if (tex_smp_desc->stage == SG_SHADERSTAGE_NONE) {
                continue;
            }
            #if defined(_SOKOL_ANY_GL)
            _SG_VALIDATE(tex_smp_desc->glsl_name != 0, VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_GLSL_NAME);
            #endif
            const bool view_slot_in_range = tex_smp_desc->view_slot < SG_MAX_VIEW_BINDSLOTS;
            const bool smp_slot_in_range = tex_smp_desc->sampler_slot < SG_MAX_SAMPLER_BINDSLOTS;
            _SG_VALIDATE(view_slot_in_range, VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_VIEW_SLOT_OUT_OF_RANGE);
            _SG_VALIDATE(smp_slot_in_range, VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_SAMPLER_SLOT_OUT_OF_RANGE);
            if (view_slot_in_range && smp_slot_in_range) {
                ref_texview_slot_mask |= 1 << tex_smp_desc->view_slot;
                ref_smp_slot_mask |= 1 << tex_smp_desc->sampler_slot;
                const sg_shader_view* view_desc = &desc->views[tex_smp_desc->view_slot];
                const sg_shader_sampler* smp_desc = &desc->samplers[tex_smp_desc->sampler_slot];
                _SG_VALIDATE(view_desc->texture.stage != SG_SHADERSTAGE_NONE, VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_EXPECT_TEXTURE_VIEW);
                _SG_VALIDATE(view_desc->texture.stage == tex_smp_desc->stage, VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_TEXTURE_STAGE_MISMATCH);
                _SG_VALIDATE(smp_desc->stage == tex_smp_desc->stage, VALIDATE_SHADERDESC_TEXTURE_SAMPLER_PAIR_SAMPLER_STAGE_MISMATCH);
                const bool needs_nonfiltering = (view_desc->texture.sample_type == SG_IMAGESAMPLETYPE_UINT)
                    || (view_desc->texture.sample_type == SG_IMAGESAMPLETYPE_SINT)
                    || (view_desc->texture.sample_type == SG_IMAGESAMPLETYPE_UNFILTERABLE_FLOAT);
                const bool needs_comparison = view_desc->texture.sample_type == SG_IMAGESAMPLETYPE_DEPTH;
                if (needs_nonfiltering) {
                    _SG_VALIDATE(smp_desc->sampler_type == SG_SAMPLERTYPE_NONFILTERING, VALIDATE_SHADERDESC_NONFILTERING_SAMPLER_REQUIRED);
                }
                if (needs_comparison) {
                    _SG_VALIDATE(smp_desc->sampler_type == SG_SAMPLERTYPE_COMPARISON, VALIDATE_SHADERDESC_COMPARISON_SAMPLER_REQUIRED);
                }
            }
        }
        // each texture view and sampler must be referenced by a texture-sampler pair
        _SG_VALIDATE(texview_slot_mask == ref_texview_slot_mask, VALIDATE_SHADERDESC_TEXVIEW_NOT_REFERENCED_BY_TEXTURE_SAMPLER_PAIRS);
        _SG_VALIDATE(smp_slot_mask == ref_smp_slot_mask, VALIDATE_SHADERDESC_SAMPLER_NOT_REFERENCED_BY_TEXTURE_SAMPLER_PAIRS);
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_pipeline_desc(const sg_pipeline_desc* desc) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(desc);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(desc);
        _sg_validate_begin();
        _SG_VALIDATE(desc->_start_canary == 0, VALIDATE_PIPELINEDESC_CANARY);
        _SG_VALIDATE(desc->_end_canary == 0, VALIDATE_PIPELINEDESC_CANARY);
        _SG_VALIDATE(desc->shader.id != SG_INVALID_ID, VALIDATE_PIPELINEDESC_SHADER);
        const _sg_shader_t* shd = _sg_lookup_shader(desc->shader.id);
        _SG_VALIDATE(0 != shd, VALIDATE_PIPELINEDESC_SHADER);
        if (shd) {
            _SG_VALIDATE(shd->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_PIPELINEDESC_SHADER);
            if (desc->compute) {
                _SG_VALIDATE(shd->cmn.is_compute, VALIDATE_PIPELINEDESC_COMPUTE_SHADER_EXPECTED);
            } else {
                _SG_VALIDATE(!shd->cmn.is_compute, VALIDATE_PIPELINEDESC_NO_COMPUTE_SHADER_EXPECTED);
                bool attrs_cont = true;
                for (size_t attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
                    const sg_vertex_attr_state* a_state = &desc->layout.attrs[attr_index];
                    if (a_state->format == SG_VERTEXFORMAT_INVALID) {
                        attrs_cont = false;
                        continue;
                    }
                    _SG_VALIDATE(attrs_cont, VALIDATE_PIPELINEDESC_NO_CONT_ATTRS);
                    SOKOL_ASSERT(a_state->buffer_index < SG_MAX_VERTEXBUFFER_BINDSLOTS);
                    // vertex format must match the expected shader attribute base type (if provided)
                    if (shd->cmn.attrs[attr_index].base_type != SG_SHADERATTRBASETYPE_UNDEFINED) {
                        if (_sg_vertexformat_basetype(a_state->format) != shd->cmn.attrs[attr_index].base_type) {
                            _SG_VALIDATE(false, VALIDATE_PIPELINEDESC_ATTR_BASETYPE_MISMATCH);
                            _SG_LOGMSG(VALIDATE_PIPELINEDESC_ATTR_BASETYPE_MISMATCH, "attr format:");
                            _SG_LOGMSG(VALIDATE_PIPELINEDESC_ATTR_BASETYPE_MISMATCH, _sg_vertexformat_to_string(a_state->format));
                            _SG_LOGMSG(VALIDATE_PIPELINEDESC_ATTR_BASETYPE_MISMATCH, "shader attr base type:");
                            _SG_LOGMSG(VALIDATE_PIPELINEDESC_ATTR_BASETYPE_MISMATCH, _sg_shaderattrbasetype_to_string(shd->cmn.attrs[attr_index].base_type));
                        }
                    }
                    #if defined(SOKOL_D3D11)
                    // on D3D11, semantic names (and semantic indices) must be provided
                    _SG_VALIDATE(!_sg_strempty(&shd->d3d11.attrs[attr_index].sem_name), VALIDATE_PIPELINEDESC_ATTR_SEMANTICS);
                    #endif
                }
                // render pipelines must only use readonly storage buffer bindings
                for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
                    if (shd->cmn.views[i].view_type == SG_VIEWTYPE_STORAGEBUFFER) {
                        _SG_VALIDATE(shd->cmn.views[i].sbuf_readonly, VALIDATE_PIPELINEDESC_SHADER_READONLY_STORAGEBUFFERS);
                    }
                }
                for (int buf_index = 0; buf_index < SG_MAX_VERTEXBUFFER_BINDSLOTS; buf_index++) {
                    const sg_vertex_buffer_layout_state* l_state = &desc->layout.buffers[buf_index];
                    if (l_state->stride == 0) {
                        continue;
                    }
                    _SG_VALIDATE(_sg_multiple_u64((uint64_t)l_state->stride, 4), VALIDATE_PIPELINEDESC_LAYOUT_STRIDE4);
                }
            }
        }
        for (size_t color_index = 0; color_index < (size_t)desc->color_count; color_index++) {
            SOKOL_ASSERT(color_index < SG_MAX_COLOR_ATTACHMENTS);
            const sg_blend_state* bs = &desc->colors[color_index].blend;
            if ((bs->op_rgb == SG_BLENDOP_MIN) || (bs->op_rgb == SG_BLENDOP_MAX)) {
                _SG_VALIDATE((bs->src_factor_rgb == SG_BLENDFACTOR_ONE) && (bs->dst_factor_rgb == SG_BLENDFACTOR_ONE), VALIDATE_PIPELINEDESC_BLENDOP_MINMAX_REQUIRES_BLENDFACTOR_ONE);
            }
            if ((bs->op_alpha == SG_BLENDOP_MIN) || (bs->op_alpha == SG_BLENDOP_MAX)) {
                _SG_VALIDATE((bs->src_factor_alpha == SG_BLENDFACTOR_ONE) && (bs->dst_factor_alpha == SG_BLENDFACTOR_ONE), VALIDATE_PIPELINEDESC_BLENDOP_MINMAX_REQUIRES_BLENDFACTOR_ONE);
            }
        }
        return _sg_validate_end();
    #endif
}
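// validate an sg_view_desc: exactly one of the view-type substructs must be
// initialized, the referenced image or buffer must be alive and valid, its
// usage flags and pixel format must be compatible with the view type, and
// the requested mip-level/slice ranges must lie within the image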
_SOKOL_PRIVATE bool _sg_validate_view_desc(const sg_view_desc* desc) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(desc);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(desc);
        _sg_validate_begin();
        _SG_VALIDATE(desc->_start_canary == 0, VALIDATE_VIEWDESC_CANARY);
        _SG_VALIDATE(desc->_end_canary == 0, VALIDATE_VIEWDESC_CANARY);
        // only one view type can be defined
        sg_view_type view_type = SG_VIEWTYPE_INVALID;
        const sg_image_view_desc* img_desc = 0;
        const sg_texture_view_desc* tex_desc = 0;
        const sg_buffer_view_desc* buf_desc = 0;
        if (desc->texture.image.id != SG_INVALID_ID) {
            view_type = SG_VIEWTYPE_TEXTURE;
            tex_desc = &desc->texture;
        }
        if (desc->storage_buffer.buffer.id != SG_INVALID_ID) {
            _SG_VALIDATE(SG_VIEWTYPE_INVALID == view_type, VALIDATE_VIEWDESC_UNIQUE_VIEWTYPE);
            view_type = SG_VIEWTYPE_STORAGEBUFFER;
            buf_desc = &desc->storage_buffer;
        }
        if (desc->storage_image.image.id != SG_INVALID_ID) {
            _SG_VALIDATE(SG_VIEWTYPE_INVALID == view_type, VALIDATE_VIEWDESC_UNIQUE_VIEWTYPE);
            view_type = SG_VIEWTYPE_STORAGEIMAGE;
            img_desc = &desc->storage_image;
        }
        if (desc->color_attachment.image.id != SG_INVALID_ID) {
            _SG_VALIDATE(SG_VIEWTYPE_INVALID == view_type, VALIDATE_VIEWDESC_UNIQUE_VIEWTYPE);
            view_type = SG_VIEWTYPE_COLORATTACHMENT;
            img_desc = &desc->color_attachment;
        }
        if (desc->resolve_attachment.image.id != SG_INVALID_ID) {
            _SG_VALIDATE(SG_VIEWTYPE_INVALID == view_type, VALIDATE_VIEWDESC_UNIQUE_VIEWTYPE);
            view_type = SG_VIEWTYPE_RESOLVEATTACHMENT;
            img_desc = &desc->resolve_attachment;
        }
        if (desc->depth_stencil_attachment.image.id != SG_INVALID_ID) {
            _SG_VALIDATE(SG_VIEWTYPE_INVALID == view_type, VALIDATE_VIEWDESC_UNIQUE_VIEWTYPE);
            view_type = SG_VIEWTYPE_DEPTHSTENCILATTACHMENT;
            img_desc = &desc->depth_stencil_attachment;
        }
        _SG_VALIDATE(SG_VIEWTYPE_INVALID != view_type, VALIDATE_VIEWDESC_ANY_VIEWTYPE);
        const _sg_buffer_t* buf = 0;
        const _sg_image_t* img = 0;
        bool res_valid = false;
        if (buf_desc) {
            SOKOL_ASSERT((img_desc == 0) && (tex_desc == 0));
            buf = _sg_lookup_buffer(buf_desc->buffer.id);
            _SG_VALIDATE(buf, VALIDATE_VIEWDESC_RESOURCE_ALIVE);
            if (buf) {
                _SG_VALIDATE(buf->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_VIEWDESC_RESOURCE_FAILED);
                res_valid = buf->slot.state == SG_RESOURCESTATE_VALID;
            }
        } else if (img_desc) {
            SOKOL_ASSERT((tex_desc == 0) && (buf_desc == 0));
            img = _sg_lookup_image(img_desc->image.id);
            _SG_VALIDATE(img, VALIDATE_VIEWDESC_RESOURCE_ALIVE);
            if (img) {
                _SG_VALIDATE(img->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_VIEWDESC_RESOURCE_FAILED);
                res_valid = img->slot.state == SG_RESOURCESTATE_VALID;
            }
        } else {
            SOKOL_ASSERT(tex_desc && (img_desc == 0) && (buf_desc == 0));
            img = _sg_lookup_image(tex_desc->image.id);
            _SG_VALIDATE(img, VALIDATE_VIEWDESC_RESOURCE_ALIVE);
            if (img) {
                _SG_VALIDATE(img->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_VIEWDESC_RESOURCE_FAILED);
                res_valid = img->slot.state == SG_RESOURCESTATE_VALID;
            }
        }
        if (res_valid) {
            // check usage flags
            switch (view_type) {
                case SG_VIEWTYPE_STORAGEBUFFER:
                    SOKOL_ASSERT(buf);
                    _SG_VALIDATE(buf->cmn.usage.storage_buffer, VALIDATE_VIEWDESC_STORAGEBUFFER_USAGE);
                    break;
                case SG_VIEWTYPE_STORAGEIMAGE:
                    SOKOL_ASSERT(img);
                    _SG_VALIDATE(img->cmn.usage.storage_image, VALIDATE_VIEWDESC_STORAGEIMAGE_USAGE);
                    _SG_VALIDATE(_sg_is_valid_storage_image_format(img->cmn.pixel_format), VALIDATE_VIEWDESC_STORAGEIMAGE_PIXELFORMAT);
                    break;
                case SG_VIEWTYPE_TEXTURE:
                    SOKOL_ASSERT(img);
                    if (!_sg.features.msaa_texture_bindings) {
                        _SG_VALIDATE(img->cmn.sample_count == 1, VALIDATE_VIEWDESC_TEXTURE_EXPECT_NO_MSAA);
                    }
                    break;
                case SG_VIEWTYPE_COLORATTACHMENT:
                    SOKOL_ASSERT(img);
                    _SG_VALIDATE(img->cmn.usage.color_attachment, VALIDATE_VIEWDESC_COLORATTACHMENT_USAGE);
                    _SG_VALIDATE(_sg_is_valid_attachment_color_format(img->cmn.pixel_format), VALIDATE_VIEWDESC_COLORATTACHMENT_PIXELFORMAT);
                    break;
                case SG_VIEWTYPE_RESOLVEATTACHMENT:
                    SOKOL_ASSERT(img);
                    _SG_VALIDATE(img->cmn.usage.resolve_attachment, VALIDATE_VIEWDESC_RESOLVEATTACHMENT_USAGE);
                    _SG_VALIDATE(img->cmn.sample_count == 1, VALIDATE_VIEWDESC_RESOLVEATTACHMENT_SAMPLECOUNT);
                    break;
                case SG_VIEWTYPE_DEPTHSTENCILATTACHMENT:
                    SOKOL_ASSERT(img);
                    _SG_VALIDATE(img->cmn.usage.depth_stencil_attachment, VALIDATE_VIEWDESC_DEPTHSTENCILATTACHMENT_USAGE);
                    _SG_VALIDATE(_sg_is_valid_attachment_depth_format(img->cmn.pixel_format), VALIDATE_VIEWDESC_DEPTHSTENCILATTACHMENT_PIXELFORMAT);
                    break;
                default:
                    SOKOL_UNREACHABLE;
                    break;
            }
            if (buf_desc) {
                SOKOL_ASSERT(buf);
                _SG_VALIDATE(buf_desc->offset < buf->cmn.size, VALIDATE_VIEWDESC_STORAGEBUFFER_OFFSET_VS_BUFFER_SIZE);
                _SG_VALIDATE(_sg_multiple_u64((uint64_t)buf_desc->offset, 256), VALIDATE_VIEWDESC_STORAGEBUFFER_OFFSET_MULTIPLE_256);
            } else if (img_desc) {
                SOKOL_ASSERT(img);
                _SG_VALIDATE((img_desc->mip_level >= 0) && (img_desc->mip_level < img->cmn.num_mipmaps), VALIDATE_VIEWDESC_IMAGE_MIPLEVEL);
                if (img->cmn.type == SG_IMAGETYPE_2D) {
                    _SG_VALIDATE(img_desc->slice == 0, VALIDATE_VIEWDESC_IMAGE_2D_SLICE);
                } else if (img->cmn.type == SG_IMAGETYPE_CUBE) {
                    _SG_VALIDATE((img_desc->slice >= 0) && (img_desc->slice < 6), VALIDATE_VIEWDESC_IMAGE_CUBEMAP_SLICE);
                } else if (img->cmn.type == SG_IMAGETYPE_ARRAY) {
                    _SG_VALIDATE((img_desc->slice >= 0) && (img_desc->slice < img->cmn.num_slices), VALIDATE_VIEWDESC_IMAGE_ARRAY_SLICE);
                } else if (img->cmn.type == SG_IMAGETYPE_3D) {
                    _SG_VALIDATE(img_desc->slice == 0, VALIDATE_VIEWDESC_IMAGE_3D_SLICE);
                }
            } else if (tex_desc) {
                SOKOL_ASSERT(img);
                // NOTE: it doesn't matter here if the mip/slice count is default-zero!
                int max_mip_level = tex_desc->mip_levels.base + tex_desc->mip_levels.count;
                int max_slice = tex_desc->slices.base + tex_desc->slices.count;
                _SG_VALIDATE((tex_desc->mip_levels.base >= 0) && (max_mip_level <= img->cmn.num_mipmaps), VALIDATE_VIEWDESC_TEXTURE_MIPLEVELS);
                if (img->cmn.type == SG_IMAGETYPE_2D) {
                    _SG_VALIDATE((tex_desc->slices.base == 0) && (max_slice <= 1), VALIDATE_VIEWDESC_TEXTURE_2D_SLICES);
                } else if (img->cmn.type == SG_IMAGETYPE_CUBE) {
                    _SG_VALIDATE((tex_desc->slices.base == 0) && (max_slice <= 1), VALIDATE_VIEWDESC_TEXTURE_CUBEMAP_SLICES);
                } else if (img->cmn.type == SG_IMAGETYPE_ARRAY) {
                    _SG_VALIDATE((tex_desc->slices.base >= 0) && (max_slice <= img->cmn.num_slices), VALIDATE_VIEWDESC_TEXTURE_ARRAY_SLICES);
                } else if (img->cmn.type == SG_IMAGETYPE_3D) {
                    _SG_VALIDATE((tex_desc->slices.base == 0) && (max_slice <= 1), VALIDATE_VIEWDESC_TEXTURE_3D_SLICES);
                }
            }
        }
        return _sg_validate_end();
    #endif
}
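// validate the sg_pass struct for sg_begin_pass(): a pass is either a compute
// pass (no attachments, no swapchain info), a swapchain pass (no attachments,
// but complete swapchain info), or an offscreen pass (at least one color- or
// depth-stencil-attachment view, all with matching dimensions and sample counts)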
_SOKOL_PRIVATE bool _sg_validate_begin_pass(const sg_pass* pass) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(pass);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        const bool is_compute_pass = pass->compute;
        const bool is_swapchain_pass = !is_compute_pass && _sg_attachments_empty(&pass->attachments);
        const bool is_offscreen_pass = !(is_compute_pass || is_swapchain_pass);
        _sg_validate_begin();
        _SG_VALIDATE(pass->_start_canary == 0, VALIDATE_BEGINPASS_CANARY);
        _SG_VALIDATE(pass->_end_canary == 0, VALIDATE_BEGINPASS_CANARY);
        if (is_compute_pass) {
            _SG_VALIDATE(_sg_attachments_empty(&pass->attachments), VALIDATE_BEGINPASS_COMPUTEPASS_EXPECT_NO_ATTACHMENTS);
        } else if (is_swapchain_pass) {
            // this is a swapchain pass
            _SG_VALIDATE(pass->swapchain.width > 0, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_WIDTH);
            _SG_VALIDATE(pass->swapchain.height > 0, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_HEIGHT);
            _SG_VALIDATE(pass->swapchain.sample_count > 0, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_SAMPLECOUNT);
            _SG_VALIDATE(pass->swapchain.color_format > SG_PIXELFORMAT_NONE, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_COLORFORMAT);
            // NOTE: the depth buffer is optional, so depth_format is allowed to be invalid
            // NOTE: the GL framebuffer handle may actually be 0
            #if defined(SOKOL_METAL)
            _SG_VALIDATE(pass->swapchain.metal.current_drawable != 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_CURRENTDRAWABLE);
            if (pass->swapchain.depth_format == SG_PIXELFORMAT_NONE) {
                _SG_VALIDATE(pass->swapchain.metal.depth_stencil_texture == 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_DEPTHSTENCILTEXTURE_NOTSET);
            } else {
                _SG_VALIDATE(pass->swapchain.metal.depth_stencil_texture != 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_DEPTHSTENCILTEXTURE);
            }
            if (pass->swapchain.sample_count > 1) {
                _SG_VALIDATE(pass->swapchain.metal.msaa_color_texture != 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_MSAACOLORTEXTURE);
            } else {
                _SG_VALIDATE(pass->swapchain.metal.msaa_color_texture == 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_MSAACOLORTEXTURE_NOTSET);
            }
            #elif defined(SOKOL_D3D11)
            _SG_VALIDATE(pass->swapchain.d3d11.render_view != 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RENDERVIEW);
            if (pass->swapchain.depth_format == SG_PIXELFORMAT_NONE) {
                _SG_VALIDATE(pass->swapchain.d3d11.depth_stencil_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_DEPTHSTENCILVIEW_NOTSET);
            } else {
                _SG_VALIDATE(pass->swapchain.d3d11.depth_stencil_view != 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_DEPTHSTENCILVIEW);
            }
            if (pass->swapchain.sample_count > 1) {
                _SG_VALIDATE(pass->swapchain.d3d11.resolve_view != 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RESOLVEVIEW);
            } else {
                _SG_VALIDATE(pass->swapchain.d3d11.resolve_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RESOLVEVIEW_NOTSET);
            }
            #elif defined(SOKOL_WGPU)
            _SG_VALIDATE(pass->swapchain.wgpu.render_view != 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RENDERVIEW);
            if (pass->swapchain.depth_format == SG_PIXELFORMAT_NONE) {
                _SG_VALIDATE(pass->swapchain.wgpu.depth_stencil_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_DEPTHSTENCILVIEW_NOTSET);
            } else {
                _SG_VALIDATE(pass->swapchain.wgpu.depth_stencil_view != 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_DEPTHSTENCILVIEW);
            }
            if (pass->swapchain.sample_count > 1) {
                _SG_VALIDATE(pass->swapchain.wgpu.resolve_view != 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RESOLVEVIEW);
            } else {
                _SG_VALIDATE(pass->swapchain.wgpu.resolve_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RESOLVEVIEW_NOTSET);
            }
            #endif
        } else {
            // this is an 'offscreen pass'
            bool has_color_atts = false;
            bool has_depth_stencil_atts = false;
            bool atts_cont = true;
            int color_width = -1, color_height = -1, color_sample_count = -1;
            for (int att_index = 0; att_index < SG_MAX_COLOR_ATTACHMENTS; att_index++) {
                if (pass->attachments.colors[att_index].id == SG_INVALID_ID) {
                    atts_cont = false;
                    continue;
                }
                has_color_atts = true;
                _SG_VALIDATE(atts_cont, VALIDATE_BEGINPASS_COLORATTACHMENTVIEWS_CONTINUOUS);
                const _sg_view_t* view = _sg_lookup_view(pass->attachments.colors[att_index].id);
                // the view object must be alive
                _SG_VALIDATE(view != 0, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_ALIVE);
                if (view) {
                    // the view object must be in valid state
                    _SG_VALIDATE(view->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_VALID);
                    if (view->slot.state == SG_RESOURCESTATE_VALID) {
                        // the view object must be a color attachment view
                        _SG_VALIDATE(view->cmn.type == SG_VIEWTYPE_COLORATTACHMENT, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_TYPE);
                        // the view's image object must be alive and valid
                        const _sg_image_t* img = _sg_image_ref_ptr_or_null(&view->cmn.img.ref);
                        _SG_VALIDATE(img, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_IMAGE_ALIVE);
                        if (img) {
                            _SG_VALIDATE(img->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_IMAGE_VALID);
                            if (img->slot.state == SG_RESOURCESTATE_VALID) {
                                if (color_width == -1) {
                                    color_width = _sg_image_view_dim(view).width;
                                    color_height = _sg_image_view_dim(view).height;
                                    color_sample_count = img->cmn.sample_count;
                                } else {
                                    _SG_VALIDATE(color_width == _sg_image_view_dim(view).width, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_SIZES);
                                    _SG_VALIDATE(color_height == _sg_image_view_dim(view).height, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_SIZES);
                                    _SG_VALIDATE(color_sample_count == img->cmn.sample_count, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_SAMPLECOUNTS_EQUAL);
                                }
                            }
                        }
                    }
                }
            }
            // check resolve views
            for (int att_index = 0; att_index < SG_MAX_COLOR_ATTACHMENTS; att_index++) {
                if (pass->attachments.resolves[att_index].id == SG_INVALID_ID) {
                    continue;
                }
                _SG_VALIDATE(pass->attachments.colors[att_index].id != SG_INVALID_ID, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_NO_COLORATTACHMENTVIEW);
                const _sg_view_t* view = _sg_lookup_view(pass->attachments.resolves[att_index].id);
                // the view object must be alive
                _SG_VALIDATE(view != 0, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_ALIVE);
                if (view) {
                    // the view object must be in valid state
                    _SG_VALIDATE(view->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_VALID);
                    if (view->slot.state == SG_RESOURCESTATE_VALID) {
                        // the view object must be a resolve attachment view
                        _SG_VALIDATE(view->cmn.type == SG_VIEWTYPE_RESOLVEATTACHMENT, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_TYPE);
                        // the view's image object must be alive and valid
                        const _sg_image_t* img = _sg_image_ref_ptr_or_null(&view->cmn.img.ref);
                        _SG_VALIDATE(img, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_IMAGE_ALIVE);
                        if (img) {
                            _SG_VALIDATE(img->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_IMAGE_VALID);
                            if (img->slot.state == SG_RESOURCESTATE_VALID) {
                                if (color_width != -1) {
                                    _SG_VALIDATE(color_sample_count > 1, VALIDATE_BEGINPASS_COLORATTACHMENTVIEW_SAMPLECOUNT);
                                    _SG_VALIDATE(color_width == _sg_image_view_dim(view).width, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_SIZES);
                                    _SG_VALIDATE(color_height == _sg_image_view_dim(view).height, VALIDATE_BEGINPASS_RESOLVEATTACHMENTVIEW_SIZES);
                                }
                            }
                        }
                    }
                }
            }
            // check depth-stencil view
            if (pass->attachments.depth_stencil.id != SG_INVALID_ID) {
                has_depth_stencil_atts = true;
                const _sg_view_t* view = _sg_lookup_view(pass->attachments.depth_stencil.id);
                // the view object must be alive
                _SG_VALIDATE(view != 0, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_ALIVE);
                if (view) {
                    // the view object must be in valid state
                    _SG_VALIDATE(view->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_VALID);
                    if (view->slot.state == SG_RESOURCESTATE_VALID) {
                        // the view object must be a depth stencil attachment view
                        _SG_VALIDATE(view->cmn.type == SG_VIEWTYPE_DEPTHSTENCILATTACHMENT, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_TYPE);
                        // the view's image object must be alive and valid
                        const _sg_image_t* img = _sg_image_ref_ptr_or_null(&view->cmn.img.ref);
                        _SG_VALIDATE(img, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_IMAGE_ALIVE);
                        if (img) {
                            _SG_VALIDATE(img->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_IMAGE_VALID);
                            if (img->slot.state == SG_RESOURCESTATE_VALID) {
                                if (color_width != -1) {
                                    _SG_VALIDATE(color_width == _sg_image_view_dim(view).width, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_SIZES);
                                    _SG_VALIDATE(color_height == _sg_image_view_dim(view).height, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_SIZES);
                                    _SG_VALIDATE(color_sample_count == img->cmn.sample_count, VALIDATE_BEGINPASS_DEPTHSTENCILATTACHMENTVIEW_SAMPLECOUNT);
                                }
                            }
                        }
                    }
                }
            }
            // must have at least color- or depth-stencil-attachments
            _SG_VALIDATE(has_color_atts || has_depth_stencil_atts, VALIDATE_BEGINPASS_ATTACHMENTS_EXPECTED);
        }
        if (is_compute_pass || is_offscreen_pass) {
            _SG_VALIDATE(pass->swapchain.width == 0, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_WIDTH_NOTSET);
            _SG_VALIDATE(pass->swapchain.height == 0, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_HEIGHT_NOTSET);
            _SG_VALIDATE(pass->swapchain.sample_count == 0, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_SAMPLECOUNT_NOTSET);
            _SG_VALIDATE(pass->swapchain.color_format == _SG_PIXELFORMAT_DEFAULT, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_COLORFORMAT_NOTSET);
            _SG_VALIDATE(pass->swapchain.depth_format == _SG_PIXELFORMAT_DEFAULT, VALIDATE_BEGINPASS_SWAPCHAIN_EXPECT_DEPTHFORMAT_NOTSET);
            #if defined(SOKOL_METAL)
            _SG_VALIDATE(pass->swapchain.metal.current_drawable == 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_CURRENTDRAWABLE_NOTSET);
            _SG_VALIDATE(pass->swapchain.metal.depth_stencil_texture == 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_DEPTHSTENCILTEXTURE_NOTSET);
            _SG_VALIDATE(pass->swapchain.metal.msaa_color_texture == 0, VALIDATE_BEGINPASS_SWAPCHAIN_METAL_EXPECT_MSAACOLORTEXTURE_NOTSET);
            #elif defined(SOKOL_D3D11)
            _SG_VALIDATE(pass->swapchain.d3d11.render_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RENDERVIEW_NOTSET);
            _SG_VALIDATE(pass->swapchain.d3d11.depth_stencil_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_DEPTHSTENCILVIEW_NOTSET);
            _SG_VALIDATE(pass->swapchain.d3d11.resolve_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_D3D11_EXPECT_RESOLVEVIEW_NOTSET);
            #elif defined(SOKOL_WGPU)
            _SG_VALIDATE(pass->swapchain.wgpu.render_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RENDERVIEW_NOTSET);
            _SG_VALIDATE(pass->swapchain.wgpu.depth_stencil_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_DEPTHSTENCILVIEW_NOTSET);
            _SG_VALIDATE(pass->swapchain.wgpu.resolve_view == 0, VALIDATE_BEGINPASS_SWAPCHAIN_WGPU_EXPECT_RESOLVEVIEW_NOTSET);
            #elif defined(_SOKOL_ANY_GL)
            _SG_VALIDATE(pass->swapchain.gl.framebuffer == 0, VALIDATE_BEGINPASS_SWAPCHAIN_GL_EXPECT_FRAMEBUFFER_NOTSET);
            #endif
        }
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_apply_viewport(int x, int y, int width, int height, bool origin_top_left) {
    _SOKOL_UNUSED(x);
    _SOKOL_UNUSED(y);
    _SOKOL_UNUSED(width);
    _SOKOL_UNUSED(height);
    _SOKOL_UNUSED(origin_top_left);
    #if !defined(SOKOL_DEBUG)
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        _sg_validate_begin();
        _SG_VALIDATE(_sg.cur_pass.in_pass && !_sg.cur_pass.is_compute, VALIDATE_AVP_RENDERPASS_EXPECTED);
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_apply_scissor_rect(int x, int y, int width, int height, bool origin_top_left) {
    _SOKOL_UNUSED(x);
    _SOKOL_UNUSED(y);
    _SOKOL_UNUSED(width);
    _SOKOL_UNUSED(height);
    _SOKOL_UNUSED(origin_top_left);
    #if !defined(SOKOL_DEBUG)
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        _sg_validate_begin();
        _SG_VALIDATE(_sg.cur_pass.in_pass && !_sg.cur_pass.is_compute, VALIDATE_ASR_RENDERPASS_EXPECTED);
        return _sg_validate_end();
    #endif
}
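// validate sg_apply_pipeline(): the pipeline object and its shader must be
// alive and valid, the pipeline type must match the current pass type
// (compute vs render), and for render passes the pipeline's color/depth
// pixel formats and sample count must match the pass attachments (or the
// swapchain for swapchain passes)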
_SOKOL_PRIVATE bool _sg_validate_apply_pipeline(sg_pipeline pip_id) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(pip_id);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        _sg_validate_begin();
        // the pipeline object must be alive and valid
        _SG_VALIDATE(pip_id.id != SG_INVALID_ID, VALIDATE_APIP_PIPELINE_VALID_ID);
        const _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
        _SG_VALIDATE(pip != 0, VALIDATE_APIP_PIPELINE_EXISTS);
        if (!pip) {
            return _sg_validate_end();
        }
        _SG_VALIDATE(pip->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_APIP_PIPELINE_VALID);
        // must be called inside a pass
        _SG_VALIDATE(_sg.cur_pass.in_pass, VALIDATE_APIP_PASS_EXPECTED);
        // the pipeline's shader must be alive and valid
        const bool shd_alive = _sg_shader_ref_alive(&pip->cmn.shader);
        const _sg_shader_t* shd = shd_alive ? _sg_shader_ref_ptr(&pip->cmn.shader) : 0;
        _SG_VALIDATE(shd_alive, VALIDATE_APIP_PIPELINE_SHADER_ALIVE);
        if (shd_alive) {
            _SG_VALIDATE(shd->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_APIP_PIPELINE_SHADER_VALID);
        } else {
            return _sg_validate_end();
        }
        if (pip->cmn.is_compute) {
            _SG_VALIDATE(_sg.cur_pass.is_compute, VALIDATE_APIP_COMPUTEPASS_EXPECTED);
        } else {
            _SG_VALIDATE(!_sg.cur_pass.is_compute, VALIDATE_APIP_RENDERPASS_EXPECTED);
            if (_sg_attachments_empty(&_sg.cur_pass.atts)) {
                // a swapchain pass: pipeline attributes must match the swapchain attributes
                _SG_VALIDATE(pip->cmn.color_count == 1, VALIDATE_APIP_SWAPCHAIN_COLOR_COUNT);
                _SG_VALIDATE(pip->cmn.colors[0].pixel_format == _sg.cur_pass.swapchain.color_fmt, VALIDATE_APIP_SWAPCHAIN_COLOR_FORMAT);
                _SG_VALIDATE(pip->cmn.depth.pixel_format == _sg.cur_pass.swapchain.depth_fmt, VALIDATE_APIP_SWAPCHAIN_DEPTH_FORMAT);
                _SG_VALIDATE(pip->cmn.sample_count == _sg.cur_pass.swapchain.sample_count, VALIDATE_APIP_SWAPCHAIN_SAMPLE_COUNT);
            } else {
                // an offscreen render pass: pipeline attributes must match the current pass attachment attributes
                const _sg_attachments_ptrs_t atts_ptrs = _sg_attachments_ptrs(&_sg.cur_pass.atts);
                const bool alive = _sg_attachments_alive(&atts_ptrs);
                _SG_VALIDATE(alive, VALIDATE_APIP_ATTACHMENTS_ALIVE);
                if (alive) {
                    _SG_VALIDATE(pip->cmn.color_count == atts_ptrs.num_color_views, VALIDATE_APIP_COLORATTACHMENTS_COUNT);
                    for (int i = 0; i < pip->cmn.color_count; i++) {
                        const _sg_view_t* clr_view = atts_ptrs.color_views[i];
                        SOKOL_ASSERT(clr_view);
                        _SG_VALIDATE(clr_view->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_APIP_COLORATTACHMENTS_VIEW_VALID);
                        const _sg_image_t* clr_img = _sg_image_ref_ptr(&clr_view->cmn.img.ref);
                        SOKOL_ASSERT(clr_img);
                        _SG_VALIDATE(clr_img->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_APIP_COLORATTACHMENTS_IMAGE_VALID);
                        _SG_VALIDATE(pip->cmn.colors[i].pixel_format == clr_img->cmn.pixel_format, VALIDATE_APIP_COLORATTACHMENTS_FORMAT);
                        _SG_VALIDATE(pip->cmn.sample_count == clr_img->cmn.sample_count, VALIDATE_APIP_ATTACHMENT_SAMPLE_COUNT);
                    }
                    const _sg_view_t* ds_view = atts_ptrs.ds_view;
                    if (ds_view) {
                        _SG_VALIDATE(ds_view->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_APIP_DEPTHSTENCILATTACHMENT_VIEW_VALID);
                        const _sg_image_t* ds_img = _sg_image_ref_ptr(&ds_view->cmn.img.ref);
                        SOKOL_ASSERT(ds_img);
                        _SG_VALIDATE(ds_img->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_APIP_DEPTHSTENCILATTACHMENT_IMAGE_VALID);
                        _SG_VALIDATE(pip->cmn.depth.pixel_format == ds_img->cmn.pixel_format, VALIDATE_APIP_DEPTHSTENCILATTACHMENT_FORMAT);
                        _SG_VALIDATE(pip->cmn.sample_count == ds_img->cmn.sample_count, VALIDATE_APIP_ATTACHMENT_SAMPLE_COUNT);
                    } else {
                        _SG_VALIDATE(pip->cmn.depth.pixel_format == SG_PIXELFORMAT_NONE, VALIDATE_APIP_DEPTHSTENCILATTACHMENT_FORMAT);
                    }
                }
            }
        }
        return _sg_validate_end();
    #endif
}
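// validate sg_apply_bindings(): every binding expected by the current
// pipeline's shader must be provided and type-compatible, and an image must
// not be bound as a texture while it is also used as a pass attachment or
// storage image binding in the same pass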
_SOKOL_PRIVATE bool _sg_validate_apply_bindings(const sg_bindings* bindings) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(bindings);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        _sg_validate_begin();
        // must be called in a pass
        _SG_VALIDATE(_sg.cur_pass.in_pass, VALIDATE_ABND_PASS_EXPECTED);
        // bindings must not be empty
        bool has_any_bindings = bindings->index_buffer.id != SG_INVALID_ID;
        if (!has_any_bindings) for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
            has_any_bindings |= bindings->vertex_buffers[i].id != SG_INVALID_ID;
        }
        if (!has_any_bindings) for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
            has_any_bindings |= bindings->views[i].id != SG_INVALID_ID;
        }
        if (!has_any_bindings) for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
            has_any_bindings |= bindings->samplers[i].id != SG_INVALID_ID;
        }
        _SG_VALIDATE(has_any_bindings, VALIDATE_ABND_EMPTY_BINDINGS);
        // a pipeline object must have been applied
        const bool pip_null = _sg_pipeline_ref_null(&_sg.cur_pip);
        const bool pip_alive = _sg_pipeline_ref_alive(&_sg.cur_pip);
        _SG_VALIDATE(!pip_null, VALIDATE_ABND_NO_PIPELINE);
        _SG_VALIDATE(pip_alive, VALIDATE_ABND_PIPELINE_ALIVE);
        if (!pip_alive) {
            return _sg_validate_end();
        }
        const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
        _SG_VALIDATE(pip->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_ABND_PIPELINE_VALID);
        const bool shd_alive = _sg_shader_ref_alive(&pip->cmn.shader);
        _SG_VALIDATE(shd_alive, VALIDATE_ABND_PIPELINE_SHADER_ALIVE);
        if (!shd_alive) {
            return _sg_validate_end();
        }
        const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
        _SG_VALIDATE(shd->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_ABND_PIPELINE_SHADER_VALID);
        if (_sg.cur_pass.is_compute) {
            for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
                _SG_VALIDATE(bindings->vertex_buffers[i].id == SG_INVALID_ID, VALIDATE_ABND_COMPUTE_EXPECTED_NO_VBUFS);
            }
        } else {
            for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
                if (pip->cmn.vertex_buffer_layout_active[i]) {
                    _SG_VALIDATE(bindings->vertex_buffers[i].id != SG_INVALID_ID, VALIDATE_ABND_EXPECTED_VBUF);
                    if (bindings->vertex_buffers[i].id != SG_INVALID_ID) {
                        const _sg_buffer_t* buf = _sg_lookup_buffer(bindings->vertex_buffers[i].id);
                        _SG_VALIDATE(buf != 0, VALIDATE_ABND_VBUF_ALIVE);
                        // NOTE: state != VALID is legal and skips rendering!
                        if (buf && buf->slot.state == SG_RESOURCESTATE_VALID) {
                            _SG_VALIDATE(buf->cmn.usage.vertex_buffer, VALIDATE_ABND_VBUF_USAGE);
                            _SG_VALIDATE(!buf->cmn.append_overflow, VALIDATE_ABND_VBUF_OVERFLOW);
                        }
                    }
                }
            }
        }
        if (_sg.cur_pass.is_compute) {
            _SG_VALIDATE(bindings->index_buffer.id == SG_INVALID_ID, VALIDATE_ABND_COMPUTE_EXPECTED_NO_IBUF);
        } else {
            // check whether an index buffer binding is expected, and if one is provided, that it's still alive
            if (pip->cmn.index_type == SG_INDEXTYPE_NONE) {
                // pipeline defines non-indexed rendering, no index buffer must be provided
                _SG_VALIDATE(bindings->index_buffer.id == SG_INVALID_ID, VALIDATE_ABND_EXPECTED_NO_IBUF);
            } else {
                // pipeline defines indexed rendering, an index buffer must be provided
                _SG_VALIDATE(bindings->index_buffer.id != SG_INVALID_ID, VALIDATE_ABND_EXPECTED_IBUF);
            }
            if (bindings->index_buffer.id != SG_INVALID_ID) {
                // the buffer in the index-buffer-slot must have index buffer usage
                const _sg_buffer_t* buf = _sg_lookup_buffer(bindings->index_buffer.id);
                _SG_VALIDATE(buf != 0, VALIDATE_ABND_IBUF_ALIVE);
                // NOTE: state != VALID is legal and skips rendering!
                if (buf && buf->slot.state == SG_RESOURCESTATE_VALID) {
                    _SG_VALIDATE(buf->cmn.usage.index_buffer, VALIDATE_ABND_IBUF_USAGE);
                    _SG_VALIDATE(!buf->cmn.append_overflow, VALIDATE_ABND_IBUF_OVERFLOW);
                }
            }
        }
        // check expected view bindings
        for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
            if (shd->cmn.views[i].view_type != SG_VIEWTYPE_INVALID) {
                _SG_VALIDATE(bindings->views[i].id != SG_INVALID_ID, VALIDATE_ABND_EXPECTED_VIEW_BINDING);
                if (bindings->views[i].id != SG_INVALID_ID) {
                    // the view object must be alive
                    const _sg_view_t* view = _sg_lookup_view(bindings->views[i].id);
                    _SG_VALIDATE(view != 0, VALIDATE_ABND_VIEW_ALIVE);
                    if (view) {
                        // NOTE: an invalid view state is allowed and skips rendering
                        if (view->slot.state == SG_RESOURCESTATE_VALID) {
                            if (shd->cmn.views[i].view_type == SG_VIEWTYPE_TEXTURE) {
                                // the view object must be a texture view
                                _SG_VALIDATE(view->cmn.type == SG_VIEWTYPE_TEXTURE, VALIDATE_ABND_EXPECT_TEXVIEW);
                                // NOTE: an invalid image ref is allowed and skips rendering
                                if (_sg_image_ref_valid(&view->cmn.img.ref)) {
                                    const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                                    _SG_VALIDATE(img->cmn.type == shd->cmn.views[i].image_type, VALIDATE_ABND_TEXVIEW_IMAGETYPE_MISMATCH);
                                    if (shd->cmn.views[i].multisampled) {
                                        _SG_VALIDATE(img->cmn.sample_count > 1, VALIDATE_ABND_TEXVIEW_EXPECTED_MULTISAMPLED_IMAGE);
                                    } else {
                                        _SG_VALIDATE(img->cmn.sample_count == 1, VALIDATE_ABND_TEXVIEW_EXPECTED_NON_MULTISAMPLED_IMAGE);
                                    }
                                    const _sg_pixelformat_info_t* info = &_sg.formats[img->cmn.pixel_format];
                                    switch (shd->cmn.views[i].sample_type) {
                                        case SG_IMAGESAMPLETYPE_FLOAT:
                                            _SG_VALIDATE(info->filter, VALIDATE_ABND_TEXVIEW_EXPECTED_FILTERABLE_IMAGE);
                                            break;
                                        case SG_IMAGESAMPLETYPE_DEPTH:
                                            _SG_VALIDATE(info->depth, VALIDATE_ABND_TEXVIEW_EXPECTED_DEPTH_IMAGE);
                                            break;
                                        default:
                                            break;
                                    }
                                }
                            } else if (shd->cmn.views[i].view_type == SG_VIEWTYPE_STORAGEBUFFER) {
                                // the view object must be a storage buffer view
                                _SG_VALIDATE(view->cmn.type == SG_VIEWTYPE_STORAGEBUFFER, VALIDATE_ABND_EXPECT_SBVIEW);
                                // NOTE: an invalid buffer ref is allowed and skips rendering
                                if (_sg_buffer_ref_valid(&view->cmn.buf.ref)) {
                                    const _sg_buffer_t* buf = _sg_buffer_ref_ptr(&view->cmn.buf.ref);
                                    if (!shd->cmn.views[i].sbuf_readonly) {
                                        _SG_VALIDATE(buf->cmn.usage.immutable, VALIDATE_ABND_SBVIEW_READWRITE_IMMUTABLE);
                                    }
                                }
                            } else if (shd->cmn.views[i].view_type == SG_VIEWTYPE_STORAGEIMAGE) {
                                // the view object must be a storage-image-view
                                _SG_VALIDATE(view->cmn.type == SG_VIEWTYPE_STORAGEIMAGE, VALIDATE_ABND_EXPECT_SIMGVIEW);
                                // storage images are only allowed in compute passes
                                _SG_VALIDATE(_sg.cur_pass.is_compute, VALIDATE_ABND_SIMGVIEW_COMPUTE_PASS_EXPECTED);
                                // NOTE: an invalid image ref is allowed and skips rendering
                                if (_sg_image_ref_valid(&view->cmn.img.ref)) {
                                    const _sg_image_t* img = _sg_image_ref_ptr(&view->cmn.img.ref);
                                    _SG_VALIDATE(img->cmn.type == shd->cmn.views[i].image_type, VALIDATE_ABND_SIMGVIEW_IMAGETYPE_MISMATCH);
                                    _SG_VALIDATE(img->cmn.pixel_format == shd->cmn.views[i].access_format, VALIDATE_ABND_SIMGVIEW_ACCESSFORMAT);
                                }
                            }
                        }
                    }
                }
            }
        }
        // check expected sampler bindings
        for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
            if (shd->cmn.samplers[i].stage != SG_SHADERSTAGE_NONE) {
                _SG_VALIDATE(bindings->samplers[i].id != SG_INVALID_ID, VALIDATE_ABND_EXPECTED_SAMPLER_BINDING);
                if (bindings->samplers[i].id != SG_INVALID_ID) {
                    const _sg_sampler_t* smp = _sg_lookup_sampler(bindings->samplers[i].id);
                    _SG_VALIDATE(smp != 0, VALIDATE_ABND_SAMPLER_ALIVE);
                    if (smp) {
                        // NOTE: unlike other resources, samplers in invalid state don't skip rendering but are an actual error
                        _SG_VALIDATE(smp->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_ABND_SAMPLER_VALID);
                        if (shd->cmn.samplers[i].sampler_type == SG_SAMPLERTYPE_COMPARISON) {
                            _SG_VALIDATE(smp->cmn.compare != SG_COMPAREFUNC_NEVER, VALIDATE_ABND_UNEXPECTED_SAMPLER_COMPARE_NEVER);
                        } else {
                            _SG_VALIDATE(smp->cmn.compare == SG_COMPAREFUNC_NEVER, VALIDATE_ABND_EXPECTED_SAMPLER_COMPARE_NEVER);
                        }
                        if (shd->cmn.samplers[i].sampler_type == SG_SAMPLERTYPE_NONFILTERING) {
                            const bool nonfiltering = (smp->cmn.min_filter != SG_FILTER_LINEAR)
                                && (smp->cmn.mag_filter != SG_FILTER_LINEAR)
                                && (smp->cmn.mipmap_filter != SG_FILTER_LINEAR);
                            _SG_VALIDATE(nonfiltering, VALIDATE_ABND_EXPECTED_NONFILTERING_SAMPLER);
                        }
                    }
                }
            }
        }
        // the same image must not be used both as a texture binding and as a pass attachment or storage image binding
        for (size_t tex_view_idx = 0; tex_view_idx < SG_MAX_VIEW_BINDSLOTS; tex_view_idx++) {
            if (shd->cmn.views[tex_view_idx].view_type == SG_VIEWTYPE_TEXTURE) {
                if (bindings->views[tex_view_idx].id == SG_INVALID_ID) {
                    continue;
                }
                const _sg_view_t* tex_view = _sg_lookup_view(bindings->views[tex_view_idx].id);
                if (tex_view) {
                    const uint32_t img_id = tex_view->cmn.img.ref.sref.id;
                    if (!_sg_attachments_empty(&_sg.cur_pass.atts)) {
                        const _sg_view_t* ds_view = _sg_lookup_view(_sg.cur_pass.atts.depth_stencil.id);
                        if (ds_view) {
                            _SG_VALIDATE(img_id != ds_view->cmn.img.ref.sref.id, VALIDATE_ABND_TEXTURE_BINDING_VS_DEPTHSTENCIL_ATTACHMENT);
                        }
                        for (size_t att_idx = 0; att_idx < SG_MAX_COLOR_ATTACHMENTS; att_idx++) {
                            const _sg_view_t* color_view = _sg_lookup_view(_sg.cur_pass.atts.colors[att_idx].id);
                            if (color_view) {
                                _SG_VALIDATE(img_id != color_view->cmn.img.ref.sref.id, VALIDATE_ABND_TEXTURE_BINDING_VS_COLOR_ATTACHMENT);
                            }
                            const _sg_view_t* resolve_view = _sg_lookup_view(_sg.cur_pass.atts.resolves[att_idx].id);
                            if (resolve_view) {
                                _SG_VALIDATE(img_id != resolve_view->cmn.img.ref.sref.id, VALIDATE_ABND_TEXTURE_BINDING_VS_RESOLVE_ATTACHMENT);
                            }
                        }
                    }
                    for (size_t simg_view_idx = 0; simg_view_idx < SG_MAX_VIEW_BINDSLOTS; simg_view_idx++) {
                        if (shd->cmn.views[simg_view_idx].view_type == SG_VIEWTYPE_STORAGEIMAGE) {
                            if (bindings->views[simg_view_idx].id == SG_INVALID_ID) {
                                continue;
                            }
                            const _sg_view_t* simg_view = _sg_lookup_view(bindings->views[simg_view_idx].id);
                            if (simg_view) {
                                _SG_VALIDATE(img_id != simg_view->cmn.img.ref.sref.id, VALIDATE_ABND_TEXTURE_VS_STORAGEIMAGE_BINDING);
                            }
                        }
                    }
                }
            }
        }
        return _sg_validate_end();
    #endif
}
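// validate sg_apply_uniforms(): must be called inside a pass with a valid
// pipeline and shader applied, the shader must expect a uniform block at
// this bind slot, and the provided data size must exactly match the uniform
// block size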
_SOKOL_PRIVATE bool _sg_validate_apply_uniforms(int ub_slot, const sg_range* data) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(ub_slot);
        _SOKOL_UNUSED(data);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT((ub_slot >= 0) && (ub_slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS));
        _sg_validate_begin();
        _SG_VALIDATE(_sg.cur_pass.in_pass, VALIDATE_AU_PASS_EXPECTED);
        const _sg_pipeline_ref_t* pip_ref = &_sg.cur_pip;
        const bool pip_null = _sg_pipeline_ref_null(pip_ref);
        const bool pip_alive = _sg_pipeline_ref_alive(pip_ref);
        _SG_VALIDATE(!pip_null, VALIDATE_AU_NO_PIPELINE);
        _SG_VALIDATE(pip_alive, VALIDATE_AU_PIPELINE_ALIVE);
        if (pip_alive) {
            const _sg_pipeline_t* pip = _sg_pipeline_ref_ptr(pip_ref);
            _SG_VALIDATE(pip->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_AU_PIPELINE_VALID);
            const _sg_shader_ref_t* shd_ref = &pip->cmn.shader;
            const bool shd_alive = _sg_shader_ref_alive(shd_ref);
            _SG_VALIDATE(shd_alive, VALIDATE_AU_PIPELINE_SHADER_ALIVE);
            if (shd_alive) {
                const _sg_shader_t* shd = _sg_shader_ref_ptr(shd_ref);
                _SG_VALIDATE(shd->slot.state == SG_RESOURCESTATE_VALID, VALIDATE_AU_PIPELINE_SHADER_VALID);
                _SG_VALIDATE(shd->cmn.uniform_blocks[ub_slot].stage != SG_SHADERSTAGE_NONE, VALIDATE_AU_NO_UNIFORMBLOCK_AT_SLOT);
                _SG_VALIDATE(data->size == shd->cmn.uniform_blocks[ub_slot].size, VALIDATE_AU_SIZE);
            }
        }
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_draw(int base_element, int num_elements, int num_instances) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(base_element);
        _SOKOL_UNUSED(num_elements);
        _SOKOL_UNUSED(num_instances);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        _sg_validate_begin();
        _SG_VALIDATE(_sg.cur_pass.in_pass && !_sg.cur_pass.is_compute, VALIDATE_DRAW_RENDERPASS_EXPECTED);
        _SG_VALIDATE(base_element >= 0, VALIDATE_DRAW_BASEELEMENT_GE_ZERO);
        _SG_VALIDATE(num_elements >= 0, VALIDATE_DRAW_NUMELEMENTS_GE_ZERO);
        _SG_VALIDATE(num_instances >= 0, VALIDATE_DRAW_NUMINSTANCES_GE_ZERO);
        _SG_VALIDATE(_sg.required_bindings_and_uniforms == _sg.applied_bindings_and_uniforms, VALIDATE_DRAW_REQUIRED_BINDINGS_OR_UNIFORMS_MISSING);
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_draw_ex(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(base_element);
        _SOKOL_UNUSED(num_elements);
        _SOKOL_UNUSED(num_instances);
        _SOKOL_UNUSED(base_vertex);
        _SOKOL_UNUSED(base_instance);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        _sg_validate_begin();
        _SG_VALIDATE(_sg.cur_pass.in_pass && !_sg.cur_pass.is_compute, VALIDATE_DRAW_EX_RENDERPASS_EXPECTED);
        // NOTE: base_vertex is allowed to be < 0
        _SG_VALIDATE(base_element >= 0, VALIDATE_DRAW_EX_BASEELEMENT_GE_ZERO);
        _SG_VALIDATE(num_elements >= 0, VALIDATE_DRAW_EX_NUMELEMENTS_GE_ZERO);
        _SG_VALIDATE(num_instances >= 0, VALIDATE_DRAW_EX_NUMINSTANCES_GE_ZERO);
        _SG_VALIDATE(base_instance >= 0, VALIDATE_DRAW_EX_BASEINSTANCE_GE_ZERO);
        if (base_vertex != 0) {
            _SG_VALIDATE(_sg.features.draw_base_vertex, VALIDATE_DRAW_EX_BASEVERTEX_NOT_SUPPORTED);
        }
        if (base_instance > 0) {
            _SG_VALIDATE(_sg.features.draw_base_instance, VALIDATE_DRAW_EX_BASEINSTANCE_NOT_SUPPORTED);
        }
        if (!_sg.use_indexed_draw) {
            _SG_VALIDATE(base_vertex == 0, VALIDATE_DRAW_EX_BASEVERTEX_VS_INDEXED);
        }
        const bool use_instanced_draw = (num_instances > 1) || _sg.use_instanced_draw;
        if (!use_instanced_draw) {
            _SG_VALIDATE(base_instance == 0, VALIDATE_DRAW_EX_BASEINSTANCE_VS_INSTANCED);
        }
        _SG_VALIDATE(_sg.required_bindings_and_uniforms == _sg.applied_bindings_and_uniforms, VALIDATE_DRAW_REQUIRED_BINDINGS_OR_UNIFORMS_MISSING);
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(num_groups_x);
        _SOKOL_UNUSED(num_groups_y);
        _SOKOL_UNUSED(num_groups_z);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        _sg_validate_begin();
        _SG_VALIDATE(_sg.cur_pass.in_pass && _sg.cur_pass.is_compute, VALIDATE_DISPATCH_COMPUTEPASS_EXPECTED);
        _SG_VALIDATE((num_groups_x >= 0) && (num_groups_x < (1<<16)), VALIDATE_DISPATCH_NUMGROUPSX);
        _SG_VALIDATE((num_groups_y >= 0) && (num_groups_y < (1<<16)), VALIDATE_DISPATCH_NUMGROUPSY);
        _SG_VALIDATE((num_groups_z >= 0) && (num_groups_z < (1<<16)), VALIDATE_DISPATCH_NUMGROUPSZ);
        _SG_VALIDATE(_sg.required_bindings_and_uniforms == _sg.applied_bindings_and_uniforms, VALIDATE_DRAW_REQUIRED_BINDINGS_OR_UNIFORMS_MISSING);
        return _sg_validate_end();
    #endif
}
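// validate sg_update_buffer() and sg_append_buffer(): the buffer must not be
// immutable, the data must fit, and updates and appends are mutually
// exclusive within the same frame (with at most one update per frame)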
_SOKOL_PRIVATE bool _sg_validate_update_buffer(const _sg_buffer_t* buf, const sg_range* data) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(buf);
        _SOKOL_UNUSED(data);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(buf && data && data->ptr);
        _sg_validate_begin();
        _SG_VALIDATE(!buf->cmn.usage.immutable, VALIDATE_UPDATEBUF_USAGE);
        _SG_VALIDATE(buf->cmn.size >= (int)data->size, VALIDATE_UPDATEBUF_SIZE);
        _SG_VALIDATE(buf->cmn.update_frame_index != _sg.frame_index, VALIDATE_UPDATEBUF_ONCE);
        _SG_VALIDATE(buf->cmn.append_frame_index != _sg.frame_index, VALIDATE_UPDATEBUF_APPEND);
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_append_buffer(const _sg_buffer_t* buf, const sg_range* data) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(buf);
        _SOKOL_UNUSED(data);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(buf && data && data->ptr);
        _sg_validate_begin();
        _SG_VALIDATE(!buf->cmn.usage.immutable, VALIDATE_APPENDBUF_USAGE);
        _SG_VALIDATE(buf->cmn.size >= (buf->cmn.append_pos + (int)data->size), VALIDATE_APPENDBUF_SIZE);
        _SG_VALIDATE(buf->cmn.update_frame_index != _sg.frame_index, VALIDATE_APPENDBUF_UPDATE);
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_update_image(const _sg_image_t* img, const sg_image_data* data) {
    #if !defined(SOKOL_DEBUG)
        _SOKOL_UNUSED(img);
        _SOKOL_UNUSED(data);
        return true;
    #else
        if (_sg.desc.disable_validation) {
            return true;
        }
        SOKOL_ASSERT(img && data);
        _sg_validate_begin();
        _SG_VALIDATE(!img->cmn.usage.immutable, VALIDATE_UPDIMG_USAGE);
        _SG_VALIDATE(img->cmn.upd_frame_index != _sg.frame_index, VALIDATE_UPDIMG_ONCE);
        _sg_validate_image_data(data,
            img->cmn.pixel_format,
            img->cmn.width,
            img->cmn.height,
            img->cmn.num_mipmaps,
            img->cmn.num_slices);
        return _sg_validate_end();
    #endif
}
_SOKOL_PRIVATE bool _sg_validate_shader_binding_limits(const sg_shader_desc* desc) {
    SOKOL_ASSERT(desc);
    // NOTE: this validation check is also active in release mode; if a shader
    // uses more bindings than allowed, shader creation will fail
    int vs_num_tex = 0;
    int fs_num_tex = 0;
    int cs_num_tex = 0;
    int vs_num_sbuf = 0;
    int fs_num_sbuf = 0;
    int cs_num_sbuf = 0;
    int vs_num_simg = 0;
    int fs_num_simg = 0;
    int cs_num_simg = 0;
    int vs_num_texsmp = 0;
    int fs_num_texsmp = 0;
    int cs_num_texsmp = 0;
    for (size_t i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        switch (desc->views[i].texture.stage) {
            case SG_SHADERSTAGE_VERTEX: vs_num_tex++; break;
            case SG_SHADERSTAGE_FRAGMENT: fs_num_tex++; break;
            case SG_SHADERSTAGE_COMPUTE: cs_num_tex++; break;
            default: break;
        }
        switch (desc->views[i].storage_buffer.stage) {
            case SG_SHADERSTAGE_VERTEX: vs_num_sbuf++; break;
            case SG_SHADERSTAGE_FRAGMENT: fs_num_sbuf++; break;
            case SG_SHADERSTAGE_COMPUTE: cs_num_sbuf++; break;
            default: break;
        }
        switch (desc->views[i].storage_image.stage) {
            case SG_SHADERSTAGE_VERTEX: vs_num_simg++; break;
            case SG_SHADERSTAGE_FRAGMENT: fs_num_simg++; break;
            case SG_SHADERSTAGE_COMPUTE: cs_num_simg++; break;
            default: break;
        }
    }
    for (size_t i = 0; i < SG_MAX_TEXTURE_SAMPLER_PAIRS; i++) {
        switch (desc->texture_sampler_pairs[i].stage) {
            case SG_SHADERSTAGE_VERTEX: vs_num_texsmp++; break;
            case SG_SHADERSTAGE_FRAGMENT: fs_num_texsmp++; break;
            case SG_SHADERSTAGE_COMPUTE: cs_num_texsmp++; break;
            default: break;
        }
    }
    const int max_tex = _sg.limits.max_texture_bindings_per_stage;
    const int max_sbuf = _sg.limits.max_storage_buffer_bindings_per_stage;
    const int max_simg = _sg.limits.max_storage_image_bindings_per_stage;
    bool retval = true;
    if (vs_num_tex > max_tex) {
        _SG_ERROR(SHADERDESC_TOO_MANY_VERTEXSTAGE_TEXTURES);
        retval = false;
    }
    if (fs_num_tex > max_tex) {
        _SG_ERROR(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_TEXTURES);
        retval = false;
    }
    if (cs_num_tex > max_tex) {
        _SG_ERROR(SHADERDESC_TOO_MANY_COMPUTESTAGE_TEXTURES);
        retval = false;
    }
    if (vs_num_sbuf > max_sbuf) {
        _SG_ERROR(SHADERDESC_TOO_MANY_VERTEXSTAGE_STORAGEBUFFERS);
        retval = false;
    }
    if (fs_num_sbuf > max_sbuf) {
        _SG_ERROR(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_STORAGEBUFFERS);
        retval = false;
    }
    if (cs_num_sbuf > max_sbuf) {
        _SG_ERROR(SHADERDESC_TOO_MANY_COMPUTESTAGE_STORAGEBUFFERS);
        retval = false;
    }
    if (vs_num_simg > max_simg) {
        _SG_ERROR(SHADERDESC_TOO_MANY_VERTEXSTAGE_STORAGEIMAGES);
        retval = false;
    }
    if (fs_num_simg > max_simg) {
        _SG_ERROR(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_STORAGEIMAGES);
        retval = false;
    }
    if (cs_num_simg > max_simg) {
        _SG_ERROR(SHADERDESC_TOO_MANY_COMPUTESTAGE_STORAGEIMAGES);
        retval = false;
    }
    if (vs_num_texsmp > max_tex) {
        _SG_ERROR(SHADERDESC_TOO_MANY_VERTEXSTAGE_TEXTURESAMPLERPAIRS);
        retval = false;
    }
    if (fs_num_texsmp > max_tex) {
        _SG_ERROR(SHADERDESC_TOO_MANY_FRAGMENTSTAGE_TEXTURESAMPLERPAIRS);
        retval = false;
    }
    if (cs_num_texsmp > max_tex) {
        _SG_ERROR(SHADERDESC_TOO_MANY_COMPUTESTAGE_TEXTURESAMPLERPAIRS);
        retval = false;
    }
    return retval;
}
_SOKOL_PRIVATE bool _sg_validate_pass_attachment_limits(const sg_pass* pass) {
    SOKOL_ASSERT(pass);
    int num_color_atts = 0;
    int num_resolve_atts = 0;
    for (int att_index = 0; att_index < SG_MAX_COLOR_ATTACHMENTS; att_index++) {
        if (pass->attachments.colors[att_index].id != SG_INVALID_ID) {
            num_color_atts += 1;
        }
        if (pass->attachments.resolves[att_index].id != SG_INVALID_ID) {
            num_resolve_atts += 1;
        }
    }
    bool retval = true;
    int max_color_atts = _sg.limits.max_color_attachments;
    if (num_color_atts > max_color_atts) {
        _SG_ERROR(BEGINPASS_TOO_MANY_COLOR_ATTACHMENTS);
        retval = false;
    }
    // NOTE: comparing the resolve attachment count against max_color_attachments is intentional, not a bug
    if (num_resolve_atts > max_color_atts) {
        _SG_ERROR(BEGINPASS_TOO_MANY_RESOLVE_ATTACHMENTS);
        retval = false;
    }
    return retval;
}
// ██████ ███████ ███████ ██████ ██ ██ ██████ ██████ ███████ ███████
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ██████ █████ ███████ ██ ██ ██ ██ ██████ ██ █████ ███████
// ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██ ██
// ██ ██ ███████ ███████ ██████ ██████ ██ ██ ██████ ███████ ███████
//
// >>resources
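// helper functions to resolve zero-initialized ('use default') members of the
// public desc structs into concrete default values; the input desc struct is
// not modified, instead a patched copy is returned by value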
_SOKOL_PRIVATE sg_buffer_usage _sg_buffer_usage_defaults(const sg_buffer_usage* usg) {
    sg_buffer_usage def = *usg;
    if (!(def.vertex_buffer || def.index_buffer || def.storage_buffer)) {
        def.vertex_buffer = true;
    }
    if (!(def.immutable || def.stream_update || def.dynamic_update)) {
        def.immutable = true;
    }
    return def;
}
_SOKOL_PRIVATE sg_buffer_desc _sg_buffer_desc_defaults(const sg_buffer_desc* desc) {
    sg_buffer_desc def = *desc;
    def.usage = _sg_buffer_usage_defaults(&def.usage);
    if (def.size == 0) {
        def.size = def.data.size;
    }
    return def;
}
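// a minimal usage sketch (not part of the library code): with the defaults
// above, a desc which only provides initial data yields an immutable vertex
// buffer whose size is taken from the data:
//
//      const float vertices[] = { 0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f };
//      sg_buffer vbuf = sg_make_buffer(&(sg_buffer_desc){
//          .data = SG_RANGE(vertices),
//          // .usage and .size left zero-initialized:
//          //   => usage.vertex_buffer = true, usage.immutable = true,
//          //      size = data.size
//      });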
_SOKOL_PRIVATE sg_image_usage _sg_image_usage_defaults(const sg_image_usage* usg) {
    sg_image_usage def = *usg;
    if (!(def.immutable || def.stream_update || def.dynamic_update)) {
        def.immutable = true;
    }
    return def;
}
_SOKOL_PRIVATE sg_image_desc _sg_image_desc_defaults(const sg_image_desc* desc) {
    sg_image_desc def = *desc;
    def.type = _sg_def(def.type, SG_IMAGETYPE_2D);
    def.usage = _sg_image_usage_defaults(&def.usage);
    def.num_slices = _sg_def(def.num_slices, def.type == SG_IMAGETYPE_CUBE ? 6 : 1);
    def.num_mipmaps = _sg_def(def.num_mipmaps, 1);
    if (def.usage.color_attachment || def.usage.resolve_attachment) {
        def.pixel_format = _sg_def(def.pixel_format, _sg.desc.environment.defaults.color_format);
        def.sample_count = _sg_def(def.sample_count, _sg.desc.environment.defaults.sample_count);
    } else if (def.usage.depth_stencil_attachment) {
        def.pixel_format = _sg_def(def.pixel_format, _sg.desc.environment.defaults.depth_format);
        def.sample_count = _sg_def(def.sample_count, _sg.desc.environment.defaults.sample_count);
    } else {
        def.pixel_format = _sg_def(def.pixel_format, SG_PIXELFORMAT_RGBA8);
        def.sample_count = _sg_def(def.sample_count, 1);
    }
    return def;
}
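// a minimal usage sketch (not part of the library code): a desc which only
// provides dimensions and initial pixel data resolves to an immutable 2D
// RGBA8 image with one mip level, one slice and a sample count of 1:
//
//      sg_image img = sg_make_image(&(sg_image_desc){
//          .width = 16,
//          .height = 16,
//          // 'pixels' is assumed to be a 16x16 array of RGBA8 pixel data
//          .data.subimage[0][0] = SG_RANGE(pixels),
//      });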
_SOKOL_PRIVATE sg_sampler_desc _sg_sampler_desc_defaults(const sg_sampler_desc* desc) {
    sg_sampler_desc def = *desc;
    def.min_filter = _sg_def(def.min_filter, SG_FILTER_NEAREST);
    def.mag_filter = _sg_def(def.mag_filter, SG_FILTER_NEAREST);
    def.mipmap_filter = _sg_def(def.mipmap_filter, SG_FILTER_NEAREST);
    def.wrap_u = _sg_def(def.wrap_u, SG_WRAP_REPEAT);
    def.wrap_v = _sg_def(def.wrap_v, SG_WRAP_REPEAT);
    def.wrap_w = _sg_def(def.wrap_w, SG_WRAP_REPEAT);
    def.max_lod = _sg_def_flt(def.max_lod, FLT_MAX);
    def.border_color = _sg_def(def.border_color, SG_BORDERCOLOR_OPAQUE_BLACK);
    def.compare = _sg_def(def.compare, SG_COMPAREFUNC_NEVER);
    def.max_anisotropy = _sg_def(def.max_anisotropy, 1);
    return def;
}
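// NOTE: shader entry point names default to '_main' on Metal (MSL reserves
// 'main'), and to 'main' on all other backends; when D3D11 shaders are
// provided as source code, the compile targets default to vs_4_0/ps_4_0/cs_5_0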
  21862. _SOKOL_PRIVATE sg_shader_desc _sg_shader_desc_defaults(const sg_shader_desc* desc) {
  21863. sg_shader_desc def = *desc;
  21864. #if defined(SOKOL_METAL)
  21865. def.vertex_func.entry = _sg_def(def.vertex_func.entry, "_main");
  21866. def.fragment_func.entry = _sg_def(def.fragment_func.entry, "_main");
  21867. def.compute_func.entry = _sg_def(def.compute_func.entry, "_main");
  21868. #else
  21869. def.vertex_func.entry = _sg_def(def.vertex_func.entry, "main");
  21870. def.fragment_func.entry = _sg_def(def.fragment_func.entry, "main");
  21871. def.compute_func.entry = _sg_def(def.compute_func.entry, "main");
  21872. #endif
  21873. #if defined(SOKOL_D3D11)
  21874. if (def.vertex_func.source) {
  21875. def.vertex_func.d3d11_target = _sg_def(def.vertex_func.d3d11_target, "vs_4_0");
  21876. }
  21877. if (def.fragment_func.source) {
  21878. def.fragment_func.d3d11_target = _sg_def(def.fragment_func.d3d11_target, "ps_4_0");
  21879. }
  21880. if (def.compute_func.source) {
def.compute_func.d3d11_target = _sg_def(def.compute_func.d3d11_target, "cs_5_0");
  21882. }
  21883. #endif
def.mtl_threads_per_threadgroup.y = _sg_def(def.mtl_threads_per_threadgroup.y, 1);
def.mtl_threads_per_threadgroup.z = _sg_def(def.mtl_threads_per_threadgroup.z, 1);
  21886. for (size_t ub_index = 0; ub_index < SG_MAX_UNIFORMBLOCK_BINDSLOTS; ub_index++) {
  21887. sg_shader_uniform_block* ub_desc = &def.uniform_blocks[ub_index];
  21888. if (ub_desc->stage != SG_SHADERSTAGE_NONE) {
  21889. ub_desc->layout = _sg_def(ub_desc->layout, SG_UNIFORMLAYOUT_NATIVE);
  21890. for (size_t u_index = 0; u_index < SG_MAX_UNIFORMBLOCK_MEMBERS; u_index++) {
  21891. sg_glsl_shader_uniform* u_desc = &ub_desc->glsl_uniforms[u_index];
  21892. if (u_desc->type == SG_UNIFORMTYPE_INVALID) {
  21893. break;
  21894. }
  21895. u_desc->array_count = _sg_def(u_desc->array_count, 1);
  21896. }
  21897. }
  21898. }
  21899. for (size_t view_index = 0; view_index < SG_MAX_VIEW_BINDSLOTS; view_index++) {
  21900. sg_shader_view* view_desc = &def.views[view_index];
  21901. if (view_desc->texture.stage != SG_SHADERSTAGE_NONE) {
  21902. view_desc->texture.image_type = _sg_def(view_desc->texture.image_type, SG_IMAGETYPE_2D);
  21903. view_desc->texture.sample_type = _sg_def(view_desc->texture.sample_type, SG_IMAGESAMPLETYPE_FLOAT);
  21904. } else if (view_desc->storage_image.stage != SG_SHADERSTAGE_NONE) {
  21905. view_desc->storage_image.image_type = _sg_def(view_desc->storage_image.image_type, SG_IMAGETYPE_2D);
  21906. }
  21907. }
  21908. for (size_t smp_index = 0; smp_index < SG_MAX_SAMPLER_BINDSLOTS; smp_index++) {
  21909. sg_shader_sampler* smp_desc = &def.samplers[smp_index];
  21910. if (smp_desc->stage != SG_SHADERSTAGE_NONE) {
  21911. smp_desc->sampler_type = _sg_def(smp_desc->sampler_type, SG_SAMPLERTYPE_FILTERING);
  21912. }
  21913. }
  21914. return def;
  21915. }
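// NOTE: usage sketch ('vs_src' and 'fs_src' are hypothetical backend-specific
// shader source strings): with only sources provided, the entry points
// default to "main" ("_main" on Metal), and on D3D11 the compile targets
// default to "vs_4_0"/"ps_4_0":
//
//      sg_shader shd = sg_make_shader(&(sg_shader_desc){
//          .vertex_func.source = vs_src,
//          .fragment_func.source = fs_src,
//      });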
  21916. _SOKOL_PRIVATE sg_pipeline_desc _sg_pipeline_desc_defaults(const sg_pipeline_desc* desc) {
  21917. sg_pipeline_desc def = *desc;
  21918. // FIXME: should we actually do all this stuff for a compute pipeline?
  21919. def.primitive_type = _sg_def(def.primitive_type, SG_PRIMITIVETYPE_TRIANGLES);
  21920. def.index_type = _sg_def(def.index_type, SG_INDEXTYPE_NONE);
  21921. def.cull_mode = _sg_def(def.cull_mode, SG_CULLMODE_NONE);
  21922. def.face_winding = _sg_def(def.face_winding, SG_FACEWINDING_CW);
  21923. def.sample_count = _sg_def(def.sample_count, _sg.desc.environment.defaults.sample_count);
  21924. def.stencil.front.compare = _sg_def(def.stencil.front.compare, SG_COMPAREFUNC_ALWAYS);
  21925. def.stencil.front.fail_op = _sg_def(def.stencil.front.fail_op, SG_STENCILOP_KEEP);
  21926. def.stencil.front.depth_fail_op = _sg_def(def.stencil.front.depth_fail_op, SG_STENCILOP_KEEP);
  21927. def.stencil.front.pass_op = _sg_def(def.stencil.front.pass_op, SG_STENCILOP_KEEP);
  21928. def.stencil.back.compare = _sg_def(def.stencil.back.compare, SG_COMPAREFUNC_ALWAYS);
  21929. def.stencil.back.fail_op = _sg_def(def.stencil.back.fail_op, SG_STENCILOP_KEEP);
  21930. def.stencil.back.depth_fail_op = _sg_def(def.stencil.back.depth_fail_op, SG_STENCILOP_KEEP);
  21931. def.stencil.back.pass_op = _sg_def(def.stencil.back.pass_op, SG_STENCILOP_KEEP);
  21932. def.depth.compare = _sg_def(def.depth.compare, SG_COMPAREFUNC_ALWAYS);
  21933. def.depth.pixel_format = _sg_def(def.depth.pixel_format, _sg.desc.environment.defaults.depth_format);
  21934. if (def.colors[0].pixel_format == SG_PIXELFORMAT_NONE) {
// special case: for depth-only rendering, enforce a color count of 0
  21936. def.color_count = 0;
  21937. } else {
  21938. def.color_count = _sg_def(def.color_count, 1);
  21939. }
  21940. if (def.color_count > SG_MAX_COLOR_ATTACHMENTS) {
  21941. def.color_count = SG_MAX_COLOR_ATTACHMENTS;
  21942. }
  21943. for (int i = 0; i < def.color_count; i++) {
  21944. sg_color_target_state* cs = &def.colors[i];
  21945. cs->pixel_format = _sg_def(cs->pixel_format, _sg.desc.environment.defaults.color_format);
  21946. cs->write_mask = _sg_def(cs->write_mask, SG_COLORMASK_RGBA);
  21947. sg_blend_state* bs = &def.colors[i].blend;
  21948. bs->op_rgb = _sg_def(bs->op_rgb, SG_BLENDOP_ADD);
  21949. bs->src_factor_rgb = _sg_def(bs->src_factor_rgb, SG_BLENDFACTOR_ONE);
  21950. if ((bs->op_rgb == SG_BLENDOP_MIN) || (bs->op_rgb == SG_BLENDOP_MAX)) {
  21951. bs->dst_factor_rgb = _sg_def(bs->dst_factor_rgb, SG_BLENDFACTOR_ONE);
  21952. } else {
  21953. bs->dst_factor_rgb = _sg_def(bs->dst_factor_rgb, SG_BLENDFACTOR_ZERO);
  21954. }
  21955. bs->op_alpha = _sg_def(bs->op_alpha, SG_BLENDOP_ADD);
  21956. bs->src_factor_alpha = _sg_def(bs->src_factor_alpha, SG_BLENDFACTOR_ONE);
  21957. if ((bs->op_alpha == SG_BLENDOP_MIN) || (bs->op_alpha == SG_BLENDOP_MAX)) {
  21958. bs->dst_factor_alpha = _sg_def(bs->dst_factor_alpha, SG_BLENDFACTOR_ONE);
  21959. } else {
  21960. bs->dst_factor_alpha = _sg_def(bs->dst_factor_alpha, SG_BLENDFACTOR_ZERO);
  21961. }
  21962. }
  21963. for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
  21964. sg_vertex_attr_state* a_state = &def.layout.attrs[attr_index];
  21965. if (a_state->format == SG_VERTEXFORMAT_INVALID) {
  21966. break;
  21967. }
  21968. SOKOL_ASSERT((a_state->buffer_index >= 0) && (a_state->buffer_index < SG_MAX_VERTEXBUFFER_BINDSLOTS));
  21969. sg_vertex_buffer_layout_state* l_state = &def.layout.buffers[a_state->buffer_index];
  21970. l_state->step_func = _sg_def(l_state->step_func, SG_VERTEXSTEP_PER_VERTEX);
  21971. l_state->step_rate = _sg_def(l_state->step_rate, 1);
  21972. }
  21973. // resolve vertex layout strides and offsets
  21974. _SG_STRUCT(int, auto_offset[SG_MAX_VERTEXBUFFER_BINDSLOTS]);
  21975. bool use_auto_offset = true;
  21976. for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
  21977. // to use computed offsets, *all* attr offsets must be 0
  21978. if (def.layout.attrs[attr_index].offset != 0) {
  21979. use_auto_offset = false;
  21980. }
  21981. }
  21982. for (int attr_index = 0; attr_index < SG_MAX_VERTEX_ATTRIBUTES; attr_index++) {
  21983. sg_vertex_attr_state* a_state = &def.layout.attrs[attr_index];
  21984. if (a_state->format == SG_VERTEXFORMAT_INVALID) {
  21985. break;
  21986. }
  21987. SOKOL_ASSERT((a_state->buffer_index >= 0) && (a_state->buffer_index < SG_MAX_VERTEXBUFFER_BINDSLOTS));
  21988. if (use_auto_offset) {
  21989. a_state->offset = auto_offset[a_state->buffer_index];
  21990. }
  21991. auto_offset[a_state->buffer_index] += _sg_vertexformat_bytesize(a_state->format);
  21992. }
  21993. // compute vertex strides if needed
  21994. for (int buf_index = 0; buf_index < SG_MAX_VERTEXBUFFER_BINDSLOTS; buf_index++) {
  21995. sg_vertex_buffer_layout_state* l_state = &def.layout.buffers[buf_index];
  21996. if (l_state->stride == 0) {
  21997. l_state->stride = auto_offset[buf_index];
  21998. }
  21999. }
  22000. return def;
  22001. }
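// NOTE: usage sketch ('shd' is an assumed-valid shader handle): with all
// attribute offsets left at zero, offsets and strides are computed from the
// vertex formats; here attr 1 resolves to offset 12, and buffer 0's stride
// to 20 bytes (12 + 8):
//
//      sg_pipeline pip = sg_make_pipeline(&(sg_pipeline_desc){
//          .shader = shd,
//          .layout.attrs = {
//              [0] = { .format = SG_VERTEXFORMAT_FLOAT3 },   // offset 0
//              [1] = { .format = SG_VERTEXFORMAT_FLOAT2 },   // offset 12
//          },
//      });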
  22002. _SOKOL_PRIVATE sg_view_desc _sg_view_desc_defaults(const sg_view_desc* desc) {
  22003. sg_view_desc def = *desc;
  22004. return def;
  22005. }
  22006. _SOKOL_PRIVATE sg_buffer _sg_alloc_buffer(void) {
  22007. sg_buffer res;
  22008. int slot_index = _sg_pool_alloc_index(&_sg.pools.buffer_pool);
  22009. if (_SG_INVALID_SLOT_INDEX != slot_index) {
  22010. res.id = _sg_slot_alloc(&_sg.pools.buffer_pool, &_sg.pools.buffers[slot_index].slot, slot_index);
  22011. _sg_resource_stats_inc(buffers.allocated);
  22012. } else {
  22013. res.id = SG_INVALID_ID;
  22014. _SG_ERROR(BUFFER_POOL_EXHAUSTED);
  22015. }
  22016. return res;
  22017. }
  22018. _SOKOL_PRIVATE sg_image _sg_alloc_image(void) {
  22019. sg_image res;
  22020. int slot_index = _sg_pool_alloc_index(&_sg.pools.image_pool);
  22021. if (_SG_INVALID_SLOT_INDEX != slot_index) {
  22022. res.id = _sg_slot_alloc(&_sg.pools.image_pool, &_sg.pools.images[slot_index].slot, slot_index);
  22023. _sg_resource_stats_inc(images.allocated);
  22024. } else {
  22025. res.id = SG_INVALID_ID;
  22026. _SG_ERROR(IMAGE_POOL_EXHAUSTED);
  22027. }
  22028. return res;
  22029. }
  22030. _SOKOL_PRIVATE sg_sampler _sg_alloc_sampler(void) {
  22031. sg_sampler res;
  22032. int slot_index = _sg_pool_alloc_index(&_sg.pools.sampler_pool);
  22033. if (_SG_INVALID_SLOT_INDEX != slot_index) {
  22034. res.id = _sg_slot_alloc(&_sg.pools.sampler_pool, &_sg.pools.samplers[slot_index].slot, slot_index);
  22035. _sg_resource_stats_inc(samplers.allocated);
  22036. } else {
  22037. res.id = SG_INVALID_ID;
  22038. _SG_ERROR(SAMPLER_POOL_EXHAUSTED);
  22039. }
  22040. return res;
  22041. }
  22042. _SOKOL_PRIVATE sg_shader _sg_alloc_shader(void) {
  22043. sg_shader res;
  22044. int slot_index = _sg_pool_alloc_index(&_sg.pools.shader_pool);
  22045. if (_SG_INVALID_SLOT_INDEX != slot_index) {
  22046. res.id = _sg_slot_alloc(&_sg.pools.shader_pool, &_sg.pools.shaders[slot_index].slot, slot_index);
  22047. _sg_resource_stats_inc(shaders.allocated);
  22048. } else {
  22049. res.id = SG_INVALID_ID;
  22050. _SG_ERROR(SHADER_POOL_EXHAUSTED);
  22051. }
  22052. return res;
  22053. }
  22054. _SOKOL_PRIVATE sg_pipeline _sg_alloc_pipeline(void) {
  22055. sg_pipeline res;
  22056. int slot_index = _sg_pool_alloc_index(&_sg.pools.pipeline_pool);
  22057. if (_SG_INVALID_SLOT_INDEX != slot_index) {
res.id = _sg_slot_alloc(&_sg.pools.pipeline_pool, &_sg.pools.pipelines[slot_index].slot, slot_index);
  22059. _sg_resource_stats_inc(pipelines.allocated);
  22060. } else {
  22061. res.id = SG_INVALID_ID;
  22062. _SG_ERROR(PIPELINE_POOL_EXHAUSTED);
  22063. }
  22064. return res;
  22065. }
  22066. _SOKOL_PRIVATE sg_view _sg_alloc_view(void) {
  22067. sg_view res;
  22068. int slot_index = _sg_pool_alloc_index(&_sg.pools.view_pool);
  22069. if (_SG_INVALID_SLOT_INDEX != slot_index) {
  22070. res.id = _sg_slot_alloc(&_sg.pools.view_pool, &_sg.pools.views[slot_index].slot, slot_index);
  22071. _sg_resource_stats_inc(views.allocated);
  22072. } else {
  22073. res.id = SG_INVALID_ID;
  22074. _SG_ERROR(VIEW_POOL_EXHAUSTED);
  22075. }
  22076. return res;
  22077. }
  22078. _SOKOL_PRIVATE void _sg_dealloc_buffer(_sg_buffer_t* buf) {
  22079. SOKOL_ASSERT(buf && (buf->slot.state == SG_RESOURCESTATE_ALLOC) && (buf->slot.id != SG_INVALID_ID));
  22080. _sg_pool_free_index(&_sg.pools.buffer_pool, _sg_slot_index(buf->slot.id));
  22081. _sg_slot_reset(&buf->slot);
  22082. _sg_resource_stats_inc(buffers.deallocated);
  22083. }
  22084. _SOKOL_PRIVATE void _sg_dealloc_image(_sg_image_t* img) {
  22085. SOKOL_ASSERT(img && (img->slot.state == SG_RESOURCESTATE_ALLOC) && (img->slot.id != SG_INVALID_ID));
  22086. _sg_pool_free_index(&_sg.pools.image_pool, _sg_slot_index(img->slot.id));
  22087. _sg_slot_reset(&img->slot);
  22088. _sg_resource_stats_inc(images.deallocated);
  22089. }
  22090. _SOKOL_PRIVATE void _sg_dealloc_sampler(_sg_sampler_t* smp) {
  22091. SOKOL_ASSERT(smp && (smp->slot.state == SG_RESOURCESTATE_ALLOC) && (smp->slot.id != SG_INVALID_ID));
  22092. _sg_pool_free_index(&_sg.pools.sampler_pool, _sg_slot_index(smp->slot.id));
  22093. _sg_slot_reset(&smp->slot);
  22094. _sg_resource_stats_inc(samplers.deallocated);
  22095. }
  22096. _SOKOL_PRIVATE void _sg_dealloc_shader(_sg_shader_t* shd) {
  22097. SOKOL_ASSERT(shd && (shd->slot.state == SG_RESOURCESTATE_ALLOC) && (shd->slot.id != SG_INVALID_ID));
  22098. _sg_pool_free_index(&_sg.pools.shader_pool, _sg_slot_index(shd->slot.id));
  22099. _sg_slot_reset(&shd->slot);
  22100. _sg_resource_stats_inc(shaders.deallocated);
  22101. }
  22102. _SOKOL_PRIVATE void _sg_dealloc_pipeline(_sg_pipeline_t* pip) {
  22103. SOKOL_ASSERT(pip && (pip->slot.state == SG_RESOURCESTATE_ALLOC) && (pip->slot.id != SG_INVALID_ID));
  22104. _sg_pool_free_index(&_sg.pools.pipeline_pool, _sg_slot_index(pip->slot.id));
  22105. _sg_slot_reset(&pip->slot);
  22106. _sg_resource_stats_inc(pipelines.deallocated);
  22107. }
  22108. _SOKOL_PRIVATE void _sg_dealloc_view(_sg_view_t* view) {
  22109. SOKOL_ASSERT(view && (view->slot.state == SG_RESOURCESTATE_ALLOC) && (view->slot.id != SG_INVALID_ID));
  22110. _sg_pool_free_index(&_sg.pools.view_pool, _sg_slot_index(view->slot.id));
  22111. _sg_slot_reset(&view->slot);
  22112. _sg_resource_stats_inc(views.deallocated);
  22113. }
  22114. _SOKOL_PRIVATE void _sg_init_buffer(_sg_buffer_t* buf, const sg_buffer_desc* desc) {
  22115. SOKOL_ASSERT(buf && (buf->slot.state == SG_RESOURCESTATE_ALLOC));
  22116. SOKOL_ASSERT(desc);
  22117. if (_sg_validate_buffer_desc(desc)) {
  22118. _sg_buffer_common_init(&buf->cmn, desc);
  22119. buf->slot.state = _sg_create_buffer(buf, desc);
  22120. } else {
  22121. buf->slot.state = SG_RESOURCESTATE_FAILED;
  22122. }
  22123. SOKOL_ASSERT((buf->slot.state == SG_RESOURCESTATE_VALID)||(buf->slot.state == SG_RESOURCESTATE_FAILED));
  22124. _sg_resource_stats_inc(buffers.inited);
  22125. }
  22126. _SOKOL_PRIVATE void _sg_init_image(_sg_image_t* img, const sg_image_desc* desc) {
  22127. SOKOL_ASSERT(img && (img->slot.state == SG_RESOURCESTATE_ALLOC));
  22128. SOKOL_ASSERT(desc);
  22129. if (_sg_validate_image_desc(desc)) {
  22130. _sg_image_common_init(&img->cmn, desc);
  22131. img->slot.state = _sg_create_image(img, desc);
  22132. } else {
  22133. img->slot.state = SG_RESOURCESTATE_FAILED;
  22134. }
  22135. SOKOL_ASSERT((img->slot.state == SG_RESOURCESTATE_VALID)||(img->slot.state == SG_RESOURCESTATE_FAILED));
  22136. _sg_resource_stats_inc(images.inited);
  22137. }
  22138. _SOKOL_PRIVATE void _sg_init_sampler(_sg_sampler_t* smp, const sg_sampler_desc* desc) {
  22139. SOKOL_ASSERT(smp && (smp->slot.state == SG_RESOURCESTATE_ALLOC));
  22140. SOKOL_ASSERT(desc);
  22141. if (_sg_validate_sampler_desc(desc)) {
  22142. _sg_sampler_common_init(&smp->cmn, desc);
  22143. smp->slot.state = _sg_create_sampler(smp, desc);
  22144. } else {
  22145. smp->slot.state = SG_RESOURCESTATE_FAILED;
  22146. }
  22147. SOKOL_ASSERT((smp->slot.state == SG_RESOURCESTATE_VALID)||(smp->slot.state == SG_RESOURCESTATE_FAILED));
  22148. _sg_resource_stats_inc(samplers.inited);
  22149. }
  22150. _SOKOL_PRIVATE void _sg_init_shader(_sg_shader_t* shd, const sg_shader_desc* desc) {
  22151. SOKOL_ASSERT(shd && (shd->slot.state == SG_RESOURCESTATE_ALLOC));
  22152. SOKOL_ASSERT(desc);
  22153. if (!_sg_validate_shader_desc(desc)) {
  22154. shd->slot.state = SG_RESOURCESTATE_FAILED;
  22155. return;
  22156. }
  22157. if (!_sg_validate_shader_binding_limits(desc)) {
  22158. shd->slot.state = SG_RESOURCESTATE_FAILED;
  22159. return;
  22160. }
  22161. _sg_shader_common_init(&shd->cmn, desc);
  22162. shd->slot.state = _sg_create_shader(shd, desc);
  22163. SOKOL_ASSERT((shd->slot.state == SG_RESOURCESTATE_VALID)||(shd->slot.state == SG_RESOURCESTATE_FAILED));
  22164. _sg_resource_stats_inc(shaders.inited);
  22165. }
  22166. _SOKOL_PRIVATE void _sg_init_pipeline(_sg_pipeline_t* pip, const sg_pipeline_desc* desc) {
  22167. SOKOL_ASSERT(pip && (pip->slot.state == SG_RESOURCESTATE_ALLOC));
  22168. SOKOL_ASSERT(desc);
  22169. if (_sg_validate_pipeline_desc(desc)) {
  22170. _sg_shader_t* shd = _sg_lookup_shader(desc->shader.id);
  22171. if (shd && (shd->slot.state == SG_RESOURCESTATE_VALID)) {
  22172. _sg_pipeline_common_init(&pip->cmn, desc, shd);
  22173. pip->slot.state = _sg_create_pipeline(pip, desc);
  22174. } else {
  22175. pip->slot.state = SG_RESOURCESTATE_FAILED;
  22176. }
  22177. } else {
  22178. pip->slot.state = SG_RESOURCESTATE_FAILED;
  22179. }
  22180. SOKOL_ASSERT((pip->slot.state == SG_RESOURCESTATE_VALID)||(pip->slot.state == SG_RESOURCESTATE_FAILED));
  22181. _sg_resource_stats_inc(pipelines.inited);
  22182. }
  22183. _SOKOL_PRIVATE void _sg_init_view(_sg_view_t* view, const sg_view_desc* desc) {
  22184. SOKOL_ASSERT(view && view->slot.state == SG_RESOURCESTATE_ALLOC);
  22185. SOKOL_ASSERT(desc);
  22186. if (_sg_validate_view_desc(desc)) {
  22187. uint32_t buf_id = desc->storage_buffer.buffer.id;
  22188. uint32_t img_id = desc->texture.image.id;
  22189. img_id = img_id ? img_id : desc->storage_image.image.id;
  22190. img_id = img_id ? img_id : desc->color_attachment.image.id;
  22191. img_id = img_id ? img_id : desc->resolve_attachment.image.id;
  22192. img_id = img_id ? img_id : desc->depth_stencil_attachment.image.id;
  22193. _sg_buffer_t* buf = buf_id ? _sg_lookup_buffer(buf_id) : 0;
  22194. _sg_image_t* img = img_id ? _sg_lookup_image(img_id) : 0;
  22195. sg_resource_state res_state = SG_RESOURCESTATE_INVALID;
  22196. if (buf) {
  22197. SOKOL_ASSERT(!img);
  22198. res_state = buf->slot.state;
  22199. } else if (img) {
  22200. SOKOL_ASSERT(!buf);
  22201. res_state = img->slot.state;
  22202. }
  22203. if (res_state == SG_RESOURCESTATE_VALID) {
  22204. _sg_view_common_init(&view->cmn, desc, buf, img);
  22205. view->slot.state = _sg_create_view(view, desc);
  22206. } else {
  22207. view->slot.state = SG_RESOURCESTATE_FAILED;
  22208. }
  22209. } else {
  22210. view->slot.state = SG_RESOURCESTATE_FAILED;
  22211. }
  22212. SOKOL_ASSERT((view->slot.state == SG_RESOURCESTATE_VALID) || (view->slot.state == SG_RESOURCESTATE_FAILED));
  22213. _sg_resource_stats_inc(views.inited);
  22214. }
  22215. _SOKOL_PRIVATE void _sg_uninit_buffer(_sg_buffer_t* buf) {
  22216. SOKOL_ASSERT(buf && ((buf->slot.state == SG_RESOURCESTATE_VALID) || (buf->slot.state == SG_RESOURCESTATE_FAILED)));
  22217. _sg_discard_buffer(buf);
  22218. _sg_reset_buffer_to_alloc_state(buf);
  22219. _sg_resource_stats_inc(buffers.uninited);
  22220. }
  22221. _SOKOL_PRIVATE void _sg_uninit_image(_sg_image_t* img) {
  22222. SOKOL_ASSERT(img && ((img->slot.state == SG_RESOURCESTATE_VALID) || (img->slot.state == SG_RESOURCESTATE_FAILED)));
  22223. _sg_discard_image(img);
  22224. _sg_reset_image_to_alloc_state(img);
  22225. _sg_resource_stats_inc(images.uninited);
  22226. }
  22227. _SOKOL_PRIVATE void _sg_uninit_sampler(_sg_sampler_t* smp) {
  22228. SOKOL_ASSERT(smp && ((smp->slot.state == SG_RESOURCESTATE_VALID) || (smp->slot.state == SG_RESOURCESTATE_FAILED)));
  22229. _sg_discard_sampler(smp);
  22230. _sg_reset_sampler_to_alloc_state(smp);
  22231. _sg_resource_stats_inc(samplers.uninited);
  22232. }
  22233. _SOKOL_PRIVATE void _sg_uninit_shader(_sg_shader_t* shd) {
  22234. SOKOL_ASSERT(shd && ((shd->slot.state == SG_RESOURCESTATE_VALID) || (shd->slot.state == SG_RESOURCESTATE_FAILED)));
  22235. _sg_discard_shader(shd);
  22236. _sg_reset_shader_to_alloc_state(shd);
  22237. _sg_resource_stats_inc(shaders.uninited);
  22238. }
  22239. _SOKOL_PRIVATE void _sg_uninit_pipeline(_sg_pipeline_t* pip) {
  22240. SOKOL_ASSERT(pip && ((pip->slot.state == SG_RESOURCESTATE_VALID) || (pip->slot.state == SG_RESOURCESTATE_FAILED)));
  22241. _sg_discard_pipeline(pip);
  22242. _sg_reset_pipeline_to_alloc_state(pip);
  22243. _sg_resource_stats_inc(pipelines.uninited);
  22244. }
  22245. _SOKOL_PRIVATE void _sg_uninit_view(_sg_view_t* view) {
  22246. SOKOL_ASSERT(view && ((view->slot.state == SG_RESOURCESTATE_VALID) || (view->slot.state == SG_RESOURCESTATE_FAILED)));
  22247. _sg_discard_view(view);
  22248. _sg_reset_view_to_alloc_state(view);
  22249. _sg_resource_stats_inc(views.uninited);
  22250. }
  22251. _SOKOL_PRIVATE void _sg_setup_commit_listeners(const sg_desc* desc) {
  22252. SOKOL_ASSERT(desc->max_commit_listeners > 0);
  22253. SOKOL_ASSERT(0 == _sg.commit_listeners.items);
  22254. SOKOL_ASSERT(0 == _sg.commit_listeners.num);
  22255. SOKOL_ASSERT(0 == _sg.commit_listeners.upper);
  22256. _sg.commit_listeners.num = desc->max_commit_listeners;
  22257. const size_t size = (size_t)_sg.commit_listeners.num * sizeof(sg_commit_listener);
  22258. _sg.commit_listeners.items = (sg_commit_listener*)_sg_malloc_clear(size);
  22259. }
  22260. _SOKOL_PRIVATE void _sg_discard_commit_listeners(void) {
  22261. SOKOL_ASSERT(0 != _sg.commit_listeners.items);
  22262. _sg_free(_sg.commit_listeners.items);
  22263. _sg.commit_listeners.items = 0;
  22264. }
  22265. _SOKOL_PRIVATE void _sg_notify_commit_listeners(void) {
  22266. SOKOL_ASSERT(_sg.commit_listeners.items);
  22267. for (int i = 0; i < _sg.commit_listeners.upper; i++) {
  22268. const sg_commit_listener* listener = &_sg.commit_listeners.items[i];
  22269. if (listener->func) {
  22270. listener->func(listener->user_data);
  22271. }
  22272. }
  22273. }
  22274. _SOKOL_PRIVATE bool _sg_add_commit_listener(const sg_commit_listener* new_listener) {
  22275. SOKOL_ASSERT(new_listener && new_listener->func);
  22276. SOKOL_ASSERT(_sg.commit_listeners.items);
// first check that the listener hasn't already been added
  22278. for (int i = 0; i < _sg.commit_listeners.upper; i++) {
  22279. const sg_commit_listener* slot = &_sg.commit_listeners.items[i];
  22280. if ((slot->func == new_listener->func) && (slot->user_data == new_listener->user_data)) {
  22281. _SG_ERROR(IDENTICAL_COMMIT_LISTENER);
  22282. return false;
  22283. }
  22284. }
// then try to plug a hole left by a removed listener
  22286. sg_commit_listener* slot = 0;
  22287. for (int i = 0; i < _sg.commit_listeners.upper; i++) {
  22288. if (_sg.commit_listeners.items[i].func == 0) {
  22289. slot = &_sg.commit_listeners.items[i];
  22290. break;
  22291. }
  22292. }
  22293. if (!slot) {
  22294. // append to end
  22295. if (_sg.commit_listeners.upper < _sg.commit_listeners.num) {
  22296. slot = &_sg.commit_listeners.items[_sg.commit_listeners.upper++];
  22297. }
  22298. }
  22299. if (!slot) {
  22300. _SG_ERROR(COMMIT_LISTENER_ARRAY_FULL);
  22301. return false;
  22302. }
  22303. *slot = *new_listener;
  22304. return true;
  22305. }
  22306. _SOKOL_PRIVATE bool _sg_remove_commit_listener(const sg_commit_listener* listener) {
  22307. SOKOL_ASSERT(listener && listener->func);
  22308. SOKOL_ASSERT(_sg.commit_listeners.items);
  22309. for (int i = 0; i < _sg.commit_listeners.upper; i++) {
  22310. sg_commit_listener* slot = &_sg.commit_listeners.items[i];
  22311. // both the function pointer and user data must match!
  22312. if ((slot->func == listener->func) && (slot->user_data == listener->user_data)) {
  22313. slot->func = 0;
  22314. slot->user_data = 0;
  22315. // NOTE: since _sg_add_commit_listener() already catches duplicates,
  22316. // we don't need to worry about them here
  22317. return true;
  22318. }
  22319. }
  22320. return false;
  22321. }
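// NOTE: usage sketch ('on_commit' is a hypothetical application callback):
// a listener is identified by its (func, user_data) pair, and its func is
// invoked on each sg_commit():
//
//      static void on_commit(void* user_data) { (void)user_data; /* ... */ }
//
//      sg_commit_listener l = { .func = on_commit };
//      bool added = sg_add_commit_listener(l);
//      // ...
//      bool removed = sg_remove_commit_listener(l);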
  22322. _SOKOL_PRIVATE sg_desc _sg_desc_defaults(const sg_desc* desc) {
/*
NOTE: on WebGPU the default color pixel format MUST be provided by the
application, it cannot fall back to a compile-time default.
*/
  22327. sg_desc res = *desc;
  22328. #if defined(SOKOL_WGPU)
  22329. SOKOL_ASSERT(SG_PIXELFORMAT_NONE < res.environment.defaults.color_format);
  22330. #elif defined(SOKOL_METAL) || defined(SOKOL_D3D11)
  22331. res.environment.defaults.color_format = _sg_def(res.environment.defaults.color_format, SG_PIXELFORMAT_BGRA8);
  22332. #else
  22333. res.environment.defaults.color_format = _sg_def(res.environment.defaults.color_format, SG_PIXELFORMAT_RGBA8);
  22334. #endif
  22335. res.environment.defaults.depth_format = _sg_def(res.environment.defaults.depth_format, SG_PIXELFORMAT_DEPTH_STENCIL);
  22336. res.environment.defaults.sample_count = _sg_def(res.environment.defaults.sample_count, 1);
  22337. res.buffer_pool_size = _sg_def(res.buffer_pool_size, _SG_DEFAULT_BUFFER_POOL_SIZE);
  22338. res.image_pool_size = _sg_def(res.image_pool_size, _SG_DEFAULT_IMAGE_POOL_SIZE);
  22339. res.sampler_pool_size = _sg_def(res.sampler_pool_size, _SG_DEFAULT_SAMPLER_POOL_SIZE);
  22340. res.shader_pool_size = _sg_def(res.shader_pool_size, _SG_DEFAULT_SHADER_POOL_SIZE);
  22341. res.pipeline_pool_size = _sg_def(res.pipeline_pool_size, _SG_DEFAULT_PIPELINE_POOL_SIZE);
  22342. res.view_pool_size = _sg_def(res.view_pool_size, _SG_DEFAULT_VIEW_POOL_SIZE);
  22343. res.uniform_buffer_size = _sg_def(res.uniform_buffer_size, _SG_DEFAULT_UB_SIZE);
  22344. res.max_commit_listeners = _sg_def(res.max_commit_listeners, _SG_DEFAULT_MAX_COMMIT_LISTENERS);
  22345. res.wgpu.bindgroups_cache_size = _sg_def(res.wgpu.bindgroups_cache_size, _SG_DEFAULT_WGPU_BINDGROUP_CACHE_SIZE);
  22346. res.vulkan.copy_staging_buffer_size = _sg_def(res.vulkan.copy_staging_buffer_size, _SG_DEFAULT_VK_COPY_STAGING_SIZE);
  22347. res.vulkan.stream_staging_buffer_size = _sg_def(res.vulkan.stream_staging_buffer_size, _SG_DEFAULT_VK_STREAM_STAGING_SIZE);
  22348. res.vulkan.descriptor_buffer_size = _sg_def(res.vulkan.descriptor_buffer_size, _SG_DEFAULT_VK_DESCRIPTOR_BUFFER_SIZE);
  22349. return res;
  22350. }
  22351. _SOKOL_PRIVATE sg_pass _sg_pass_defaults(const sg_pass* pass) {
  22352. sg_pass res = *pass;
  22353. if (!res.compute) {
  22354. if (_sg_attachments_empty(&pass->attachments)) {
  22355. // this is a swapchain-pass
  22356. res.swapchain.sample_count = _sg_def(res.swapchain.sample_count, _sg.desc.environment.defaults.sample_count);
  22357. res.swapchain.color_format = _sg_def(res.swapchain.color_format, _sg.desc.environment.defaults.color_format);
  22358. res.swapchain.depth_format = _sg_def(res.swapchain.depth_format, _sg.desc.environment.defaults.depth_format);
  22359. }
  22360. res.action = _sg_pass_action_defaults(&res.action);
  22361. }
  22362. return res;
  22363. }
  22364. _SOKOL_PRIVATE void _sg_discard_all_resources(void) {
/* this is a bit dumb since it loops over all pool slots to
find the occupied slots, but on the other hand it is only ever
executed at shutdown
NOTE: ONLY EXECUTE THIS AT SHUTDOWN
...because the free queues will not be reset
and the resource slots will not be cleared!
*/
  22372. for (int i = 1; i < _sg.pools.buffer_pool.size; i++) {
  22373. sg_resource_state state = _sg.pools.buffers[i].slot.state;
  22374. if ((state == SG_RESOURCESTATE_VALID) || (state == SG_RESOURCESTATE_FAILED)) {
  22375. _sg_discard_buffer(&_sg.pools.buffers[i]);
  22376. }
  22377. }
  22378. for (int i = 1; i < _sg.pools.image_pool.size; i++) {
  22379. sg_resource_state state = _sg.pools.images[i].slot.state;
  22380. if ((state == SG_RESOURCESTATE_VALID) || (state == SG_RESOURCESTATE_FAILED)) {
  22381. _sg_discard_image(&_sg.pools.images[i]);
  22382. }
  22383. }
  22384. for (int i = 1; i < _sg.pools.sampler_pool.size; i++) {
  22385. sg_resource_state state = _sg.pools.samplers[i].slot.state;
  22386. if ((state == SG_RESOURCESTATE_VALID) || (state == SG_RESOURCESTATE_FAILED)) {
  22387. _sg_discard_sampler(&_sg.pools.samplers[i]);
  22388. }
  22389. }
  22390. for (int i = 1; i < _sg.pools.shader_pool.size; i++) {
  22391. sg_resource_state state = _sg.pools.shaders[i].slot.state;
  22392. if ((state == SG_RESOURCESTATE_VALID) || (state == SG_RESOURCESTATE_FAILED)) {
  22393. _sg_discard_shader(&_sg.pools.shaders[i]);
  22394. }
  22395. }
  22396. for (int i = 1; i < _sg.pools.pipeline_pool.size; i++) {
  22397. sg_resource_state state = _sg.pools.pipelines[i].slot.state;
  22398. if ((state == SG_RESOURCESTATE_VALID) || (state == SG_RESOURCESTATE_FAILED)) {
  22399. _sg_discard_pipeline(&_sg.pools.pipelines[i]);
  22400. }
  22401. }
  22402. for (int i = 1; i < _sg.pools.view_pool.size; i++) {
  22403. sg_resource_state state = _sg.pools.views[i].slot.state;
  22404. if ((state == SG_RESOURCESTATE_VALID) || (state == SG_RESOURCESTATE_FAILED)) {
  22405. _sg_discard_view(&_sg.pools.views[i]);
  22406. }
  22407. }
  22408. }
  22409. _SOKOL_PRIVATE void _sg_override_portable_limits(void) {
  22410. if (_sg.desc.enforce_portable_limits) {
  22411. _sg.limits.max_color_attachments = SG_MAX_PORTABLE_COLOR_ATTACHMENTS;
  22412. _sg.limits.max_texture_bindings_per_stage = SG_MAX_PORTABLE_TEXTURE_BINDINGS_PER_STAGE;
  22413. if (_sg.features.compute) {
  22414. _sg.limits.max_storage_buffer_bindings_per_stage = SG_MAX_PORTABLE_STORAGEBUFFER_BINDINGS_PER_STAGE;
  22415. _sg.limits.max_storage_image_bindings_per_stage = SG_MAX_PORTABLE_STORAGEIMAGE_BINDINGS_PER_STAGE;
  22416. }
  22417. }
  22418. }
// ██████  ██    ██ ██████  ██      ██  ██████
// ██   ██ ██    ██ ██   ██ ██      ██ ██
// ██████  ██    ██ ██████  ██      ██ ██
// ██      ██    ██ ██   ██ ██      ██ ██
// ██       ██████  ██████  ███████ ██  ██████
//
// >>public
  22426. SOKOL_API_IMPL void sg_setup(const sg_desc* desc) {
  22427. SOKOL_ASSERT(!_sg.valid);
  22428. SOKOL_ASSERT(desc);
  22429. SOKOL_ASSERT((desc->_start_canary == 0) && (desc->_end_canary == 0));
  22430. SOKOL_ASSERT((desc->allocator.alloc_fn && desc->allocator.free_fn) || (!desc->allocator.alloc_fn && !desc->allocator.free_fn));
  22431. _SG_CLEAR_ARC_STRUCT(_sg_state_t, _sg);
  22432. _sg.desc = _sg_desc_defaults(desc);
  22433. _sg_setup_pools(&_sg.pools, &_sg.desc);
  22434. _sg_setup_commit_listeners(&_sg.desc);
  22435. _sg.frame_index = 1;
  22436. _sg.stats_enabled = true;
  22437. _sg_setup_backend(&_sg.desc);
  22438. _sg_override_portable_limits();
  22439. _sg.valid = true;
  22440. }
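// NOTE: usage sketch, assuming the companion headers sokol_glue.h (for
// sglue_environment()) and sokol_log.h (for slog_func); all pool sizes and
// format defaults not provided here are filled in by _sg_desc_defaults():
//
//      sg_setup(&(sg_desc){
//          .environment = sglue_environment(),
//          .logger.func = slog_func,
//      });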
  22441. SOKOL_API_IMPL void sg_shutdown(void) {
  22442. SOKOL_ASSERT(_sg.valid);
  22443. _sg_discard_all_resources();
  22444. _sg_discard_backend();
  22445. _sg_discard_commit_listeners();
  22446. _sg_discard_pools(&_sg.pools);
  22447. _SG_CLEAR_ARC_STRUCT(_sg_state_t, _sg);
  22448. }
  22449. SOKOL_API_IMPL bool sg_isvalid(void) {
  22450. return _sg.valid;
  22451. }
  22452. SOKOL_API_IMPL sg_desc sg_query_desc(void) {
  22453. SOKOL_ASSERT(_sg.valid);
  22454. return _sg.desc;
  22455. }
  22456. SOKOL_API_IMPL sg_backend sg_query_backend(void) {
  22457. SOKOL_ASSERT(_sg.valid);
  22458. return _sg.backend;
  22459. }
  22460. SOKOL_API_IMPL sg_features sg_query_features(void) {
  22461. SOKOL_ASSERT(_sg.valid);
  22462. return _sg.features;
  22463. }
  22464. SOKOL_API_IMPL sg_limits sg_query_limits(void) {
  22465. SOKOL_ASSERT(_sg.valid);
  22466. return _sg.limits;
  22467. }
  22468. SOKOL_API_IMPL sg_pixelformat_info sg_query_pixelformat(sg_pixel_format fmt) {
  22469. SOKOL_ASSERT(_sg.valid);
  22470. int fmt_index = (int) fmt;
  22471. SOKOL_ASSERT((fmt_index > SG_PIXELFORMAT_NONE) && (fmt_index < _SG_PIXELFORMAT_NUM));
  22472. const _sg_pixelformat_info_t* src = &_sg.formats[fmt_index];
  22473. _SG_STRUCT(sg_pixelformat_info, res);
  22474. res.sample = src->sample;
  22475. res.filter = src->filter;
  22476. res.render = src->render;
  22477. res.blend = src->blend;
  22478. res.msaa = src->msaa;
  22479. res.depth = src->depth;
  22480. res.compressed = _sg_is_compressed_pixel_format(fmt);
  22481. res.read = src->read;
  22482. res.write = src->write;
  22483. if (!res.compressed) {
  22484. res.bytes_per_pixel = _sg_pixelformat_bytesize(fmt);
  22485. }
  22486. return res;
  22487. }
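// NOTE: usage sketch: query the capability flags of a pixel format before
// using it, e.g. as a blendable render target:
//
//      sg_pixelformat_info info = sg_query_pixelformat(SG_PIXELFORMAT_RGBA16F);
//      if (info.render && info.blend) {
//          // ok to use as a render target with alpha-blending
//      }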
  22488. SOKOL_API_IMPL int sg_query_row_pitch(sg_pixel_format fmt, int width, int row_align_bytes) {
  22489. SOKOL_ASSERT(_sg.valid);
  22490. SOKOL_ASSERT(width > 0);
  22491. SOKOL_ASSERT((row_align_bytes > 0) && _sg_ispow2(row_align_bytes));
  22492. SOKOL_ASSERT(((int)fmt > SG_PIXELFORMAT_NONE) && ((int)fmt < _SG_PIXELFORMAT_NUM));
  22493. return _sg_row_pitch(fmt, width, row_align_bytes);
  22494. }
  22495. SOKOL_API_IMPL int sg_query_surface_pitch(sg_pixel_format fmt, int width, int height, int row_align_bytes) {
  22496. SOKOL_ASSERT(_sg.valid);
  22497. SOKOL_ASSERT((width > 0) && (height > 0));
  22498. SOKOL_ASSERT((row_align_bytes > 0) && _sg_ispow2(row_align_bytes));
  22499. SOKOL_ASSERT(((int)fmt > SG_PIXELFORMAT_NONE) && ((int)fmt < _SG_PIXELFORMAT_NUM));
  22500. return _sg_surface_pitch(fmt, width, height, row_align_bytes);
  22501. }
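// NOTE: usage sketch: SG_PIXELFORMAT_RGBA8 has 4 bytes per pixel, so a
// 256-pixel-wide row is 1024 bytes, which already satisfies a 4-byte row
// alignment, and a 256x256 surface is 1024 * 256 bytes:
//
//      int row_pitch = sg_query_row_pitch(SG_PIXELFORMAT_RGBA8, 256, 4);            // -> 1024
//      int surf_pitch = sg_query_surface_pitch(SG_PIXELFORMAT_RGBA8, 256, 256, 4);  // -> 262144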
  22502. SOKOL_API_IMPL sg_stats sg_query_stats(void) {
  22503. SOKOL_ASSERT(_sg.valid);
  22504. _sg_update_alive_free_resource_stats(&_sg.stats.total.buffers, &_sg.pools.buffer_pool);
  22505. _sg_update_alive_free_resource_stats(&_sg.stats.total.images, &_sg.pools.image_pool);
  22506. _sg_update_alive_free_resource_stats(&_sg.stats.total.views, &_sg.pools.view_pool);
  22507. _sg_update_alive_free_resource_stats(&_sg.stats.total.samplers, &_sg.pools.sampler_pool);
  22508. _sg_update_alive_free_resource_stats(&_sg.stats.total.shaders, &_sg.pools.shader_pool);
  22509. _sg_update_alive_free_resource_stats(&_sg.stats.total.pipelines, &_sg.pools.pipeline_pool);
  22510. return _sg.stats;
  22511. }
  22512. SOKOL_API_IMPL sg_trace_hooks sg_install_trace_hooks(const sg_trace_hooks* trace_hooks) {
  22513. SOKOL_ASSERT(_sg.valid);
  22514. SOKOL_ASSERT(trace_hooks);
  22515. _SOKOL_UNUSED(trace_hooks);
  22516. #if defined(SOKOL_TRACE_HOOKS)
  22517. sg_trace_hooks old_hooks = _sg.hooks;
  22518. _sg.hooks = *trace_hooks;
  22519. #else
  22520. static sg_trace_hooks old_hooks;
  22521. _SG_WARN(TRACE_HOOKS_NOT_ENABLED);
  22522. #endif
  22523. return old_hooks;
  22524. }
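// NOTE: usage sketch ('trace_make_buffer' is a hypothetical hook function,
// assumed to match the sg_trace_hooks.make_buffer signature); this only has
// an effect when compiling with SOKOL_TRACE_HOOKS defined:
//
//      sg_trace_hooks hooks = {
//          .make_buffer = trace_make_buffer,
//      };
//      sg_trace_hooks prev_hooks = sg_install_trace_hooks(&hooks);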
  22525. SOKOL_API_IMPL sg_buffer sg_alloc_buffer(void) {
  22526. SOKOL_ASSERT(_sg.valid);
  22527. sg_buffer res = _sg_alloc_buffer();
  22528. _SG_TRACE_ARGS(alloc_buffer, res);
  22529. return res;
  22530. }
  22531. SOKOL_API_IMPL sg_image sg_alloc_image(void) {
  22532. SOKOL_ASSERT(_sg.valid);
  22533. sg_image res = _sg_alloc_image();
  22534. _SG_TRACE_ARGS(alloc_image, res);
  22535. return res;
  22536. }
  22537. SOKOL_API_IMPL sg_sampler sg_alloc_sampler(void) {
  22538. SOKOL_ASSERT(_sg.valid);
  22539. sg_sampler res = _sg_alloc_sampler();
  22540. _SG_TRACE_ARGS(alloc_sampler, res);
  22541. return res;
  22542. }
  22543. SOKOL_API_IMPL sg_shader sg_alloc_shader(void) {
  22544. SOKOL_ASSERT(_sg.valid);
  22545. sg_shader res = _sg_alloc_shader();
  22546. _SG_TRACE_ARGS(alloc_shader, res);
  22547. return res;
  22548. }
  22549. SOKOL_API_IMPL sg_pipeline sg_alloc_pipeline(void) {
  22550. SOKOL_ASSERT(_sg.valid);
  22551. sg_pipeline res = _sg_alloc_pipeline();
  22552. _SG_TRACE_ARGS(alloc_pipeline, res);
  22553. return res;
  22554. }
  22555. SOKOL_API_IMPL sg_view sg_alloc_view(void) {
  22556. SOKOL_ASSERT(_sg.valid);
  22557. sg_view res = _sg_alloc_view();
  22558. _SG_TRACE_ARGS(alloc_view, res);
  22559. return res;
  22560. }
  22561. SOKOL_API_IMPL void sg_dealloc_buffer(sg_buffer buf_id) {
  22562. SOKOL_ASSERT(_sg.valid);
  22563. _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
  22564. if (buf) {
  22565. if (buf->slot.state == SG_RESOURCESTATE_ALLOC) {
  22566. _sg_dealloc_buffer(buf);
  22567. } else {
  22568. _SG_ERROR(DEALLOC_BUFFER_INVALID_STATE);
  22569. }
  22570. }
  22571. _SG_TRACE_ARGS(dealloc_buffer, buf_id);
  22572. }
  22573. SOKOL_API_IMPL void sg_dealloc_image(sg_image img_id) {
  22574. SOKOL_ASSERT(_sg.valid);
  22575. _sg_image_t* img = _sg_lookup_image(img_id.id);
  22576. if (img) {
  22577. if (img->slot.state == SG_RESOURCESTATE_ALLOC) {
  22578. _sg_dealloc_image(img);
  22579. } else {
  22580. _SG_ERROR(DEALLOC_IMAGE_INVALID_STATE);
  22581. }
  22582. }
  22583. _SG_TRACE_ARGS(dealloc_image, img_id);
  22584. }
  22585. SOKOL_API_IMPL void sg_dealloc_sampler(sg_sampler smp_id) {
  22586. SOKOL_ASSERT(_sg.valid);
  22587. _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
  22588. if (smp) {
  22589. if (smp->slot.state == SG_RESOURCESTATE_ALLOC) {
  22590. _sg_dealloc_sampler(smp);
  22591. } else {
  22592. _SG_ERROR(DEALLOC_SAMPLER_INVALID_STATE);
  22593. }
  22594. }
  22595. _SG_TRACE_ARGS(dealloc_sampler, smp_id);
  22596. }
  22597. SOKOL_API_IMPL void sg_dealloc_shader(sg_shader shd_id) {
  22598. SOKOL_ASSERT(_sg.valid);
  22599. _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
  22600. if (shd) {
  22601. if (shd->slot.state == SG_RESOURCESTATE_ALLOC) {
  22602. _sg_dealloc_shader(shd);
  22603. } else {
  22604. _SG_ERROR(DEALLOC_SHADER_INVALID_STATE);
  22605. }
  22606. }
  22607. _SG_TRACE_ARGS(dealloc_shader, shd_id);
  22608. }
  22609. SOKOL_API_IMPL void sg_dealloc_pipeline(sg_pipeline pip_id) {
  22610. SOKOL_ASSERT(_sg.valid);
  22611. _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
  22612. if (pip) {
  22613. if (pip->slot.state == SG_RESOURCESTATE_ALLOC) {
  22614. _sg_dealloc_pipeline(pip);
  22615. } else {
  22616. _SG_ERROR(DEALLOC_PIPELINE_INVALID_STATE);
  22617. }
  22618. }
  22619. _SG_TRACE_ARGS(dealloc_pipeline, pip_id);
  22620. }
  22621. SOKOL_API_IMPL void sg_dealloc_view(sg_view view_id) {
  22622. SOKOL_ASSERT(_sg.valid);
  22623. _sg_view_t* view = _sg_lookup_view(view_id.id);
  22624. if (view) {
  22625. if (view->slot.state == SG_RESOURCESTATE_ALLOC) {
  22626. _sg_dealloc_view(view);
  22627. } else {
  22628. _SG_ERROR(DEALLOC_VIEW_INVALID_STATE);
  22629. }
  22630. }
  22631. _SG_TRACE_ARGS(dealloc_view, view_id);
  22632. }
  22633. SOKOL_API_IMPL void sg_init_buffer(sg_buffer buf_id, const sg_buffer_desc* desc) {
  22634. SOKOL_ASSERT(_sg.valid);
  22635. sg_buffer_desc desc_def = _sg_buffer_desc_defaults(desc);
  22636. _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
  22637. if (buf) {
  22638. if (buf->slot.state == SG_RESOURCESTATE_ALLOC) {
  22639. _sg_init_buffer(buf, &desc_def);
  22640. SOKOL_ASSERT((buf->slot.state == SG_RESOURCESTATE_VALID) || (buf->slot.state == SG_RESOURCESTATE_FAILED));
  22641. } else {
  22642. _SG_ERROR(INIT_BUFFER_INVALID_STATE);
  22643. }
  22644. }
  22645. _SG_TRACE_ARGS(init_buffer, buf_id, &desc_def);
  22646. }
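// NOTE: usage sketch: the separate alloc/init functions allow a two-step
// setup where the handle exists before the resource content does (e.g.
// while data is loaded asynchronously); 'vertices' is a hypothetical array:
//
//      sg_buffer buf = sg_alloc_buffer();    // handle now in ALLOC state
//      // ...later, once the data is available:
//      sg_init_buffer(buf, &(sg_buffer_desc){
//          .data = SG_RANGE(vertices),
//      });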
  22647. SOKOL_API_IMPL void sg_init_image(sg_image img_id, const sg_image_desc* desc) {
  22648. SOKOL_ASSERT(_sg.valid);
  22649. sg_image_desc desc_def = _sg_image_desc_defaults(desc);
  22650. _sg_image_t* img = _sg_lookup_image(img_id.id);
  22651. if (img) {
  22652. if (img->slot.state == SG_RESOURCESTATE_ALLOC) {
  22653. _sg_init_image(img, &desc_def);
  22654. SOKOL_ASSERT((img->slot.state == SG_RESOURCESTATE_VALID) || (img->slot.state == SG_RESOURCESTATE_FAILED));
  22655. } else {
  22656. _SG_ERROR(INIT_IMAGE_INVALID_STATE);
  22657. }
  22658. }
  22659. _SG_TRACE_ARGS(init_image, img_id, &desc_def);
  22660. }
  22661. SOKOL_API_IMPL void sg_init_sampler(sg_sampler smp_id, const sg_sampler_desc* desc) {
  22662. SOKOL_ASSERT(_sg.valid);
  22663. sg_sampler_desc desc_def = _sg_sampler_desc_defaults(desc);
  22664. _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
  22665. if (smp) {
  22666. if (smp->slot.state == SG_RESOURCESTATE_ALLOC) {
  22667. _sg_init_sampler(smp, &desc_def);
  22668. SOKOL_ASSERT((smp->slot.state == SG_RESOURCESTATE_VALID) || (smp->slot.state == SG_RESOURCESTATE_FAILED));
  22669. } else {
  22670. _SG_ERROR(INIT_SAMPLER_INVALID_STATE);
  22671. }
  22672. }
  22673. _SG_TRACE_ARGS(init_sampler, smp_id, &desc_def);
  22674. }
  22675. SOKOL_API_IMPL void sg_init_shader(sg_shader shd_id, const sg_shader_desc* desc) {
  22676. SOKOL_ASSERT(_sg.valid);
  22677. sg_shader_desc desc_def = _sg_shader_desc_defaults(desc);
  22678. _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
  22679. if (shd) {
  22680. if (shd->slot.state == SG_RESOURCESTATE_ALLOC) {
  22681. _sg_init_shader(shd, &desc_def);
  22682. SOKOL_ASSERT((shd->slot.state == SG_RESOURCESTATE_VALID) || (shd->slot.state == SG_RESOURCESTATE_FAILED));
  22683. } else {
  22684. _SG_ERROR(INIT_SHADER_INVALID_STATE);
  22685. }
  22686. }
  22687. _SG_TRACE_ARGS(init_shader, shd_id, &desc_def);
  22688. }
  22689. SOKOL_API_IMPL void sg_init_pipeline(sg_pipeline pip_id, const sg_pipeline_desc* desc) {
  22690. SOKOL_ASSERT(_sg.valid);
  22691. sg_pipeline_desc desc_def = _sg_pipeline_desc_defaults(desc);
  22692. _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
  22693. if (pip) {
  22694. if (pip->slot.state == SG_RESOURCESTATE_ALLOC) {
  22695. _sg_init_pipeline(pip, &desc_def);
  22696. SOKOL_ASSERT((pip->slot.state == SG_RESOURCESTATE_VALID) || (pip->slot.state == SG_RESOURCESTATE_FAILED));
  22697. } else {
  22698. _SG_ERROR(INIT_PIPELINE_INVALID_STATE);
  22699. }
  22700. }
  22701. _SG_TRACE_ARGS(init_pipeline, pip_id, &desc_def);
  22702. }
  22703. SOKOL_API_IMPL void sg_init_view(sg_view view_id, const sg_view_desc* desc) {
  22704. SOKOL_ASSERT(_sg.valid);
  22705. sg_view_desc desc_def = _sg_view_desc_defaults(desc);
  22706. _sg_view_t* view = _sg_lookup_view(view_id.id);
  22707. if (view) {
  22708. if (view->slot.state == SG_RESOURCESTATE_ALLOC) {
  22709. _sg_init_view(view, &desc_def);
  22710. SOKOL_ASSERT((view->slot.state == SG_RESOURCESTATE_VALID)
  22711. || (view->slot.state == SG_RESOURCESTATE_FAILED)
  22712. || (view->slot.state == SG_RESOURCESTATE_ALLOC));
  22713. } else {
  22714. _SG_ERROR(INIT_VIEW_INVALID_STATE);
  22715. }
  22716. }
  22717. _SG_TRACE_ARGS(init_view, view_id, &desc_def);
  22718. }
  22719. SOKOL_API_IMPL void sg_uninit_buffer(sg_buffer buf_id) {
  22720. SOKOL_ASSERT(_sg.valid);
  22721. _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
  22722. if (buf) {
  22723. if ((buf->slot.state == SG_RESOURCESTATE_VALID) || (buf->slot.state == SG_RESOURCESTATE_FAILED)) {
  22724. _sg_uninit_buffer(buf);
  22725. SOKOL_ASSERT(buf->slot.state == SG_RESOURCESTATE_ALLOC);
  22726. } else if (buf->slot.state != SG_RESOURCESTATE_ALLOC) {
  22727. _SG_ERROR(UNINIT_BUFFER_INVALID_STATE);
  22728. }
  22729. }
  22730. _SG_TRACE_ARGS(uninit_buffer, buf_id);
  22731. }
  22732. SOKOL_API_IMPL void sg_uninit_image(sg_image img_id) {
  22733. SOKOL_ASSERT(_sg.valid);
  22734. _sg_image_t* img = _sg_lookup_image(img_id.id);
  22735. if (img) {
  22736. if ((img->slot.state == SG_RESOURCESTATE_VALID) || (img->slot.state == SG_RESOURCESTATE_FAILED)) {
  22737. _sg_uninit_image(img);
  22738. SOKOL_ASSERT(img->slot.state == SG_RESOURCESTATE_ALLOC);
  22739. } else if (img->slot.state != SG_RESOURCESTATE_ALLOC) {
  22740. _SG_ERROR(UNINIT_IMAGE_INVALID_STATE);
  22741. }
  22742. }
  22743. _SG_TRACE_ARGS(uninit_image, img_id);
  22744. }
  22745. SOKOL_API_IMPL void sg_uninit_sampler(sg_sampler smp_id) {
  22746. SOKOL_ASSERT(_sg.valid);
  22747. _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
  22748. if (smp) {
  22749. if ((smp->slot.state == SG_RESOURCESTATE_VALID) || (smp->slot.state == SG_RESOURCESTATE_FAILED)) {
  22750. _sg_uninit_sampler(smp);
  22751. SOKOL_ASSERT(smp->slot.state == SG_RESOURCESTATE_ALLOC);
  22752. } else if (smp->slot.state != SG_RESOURCESTATE_ALLOC) {
  22753. _SG_ERROR(UNINIT_SAMPLER_INVALID_STATE);
  22754. }
  22755. }
  22756. _SG_TRACE_ARGS(uninit_sampler, smp_id);
  22757. }
  22758. SOKOL_API_IMPL void sg_uninit_shader(sg_shader shd_id) {
  22759. SOKOL_ASSERT(_sg.valid);
  22760. _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
  22761. if (shd) {
  22762. if ((shd->slot.state == SG_RESOURCESTATE_VALID) || (shd->slot.state == SG_RESOURCESTATE_FAILED)) {
  22763. _sg_uninit_shader(shd);
  22764. SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_ALLOC);
  22765. } else if (shd->slot.state != SG_RESOURCESTATE_ALLOC) {
  22766. _SG_ERROR(UNINIT_SHADER_INVALID_STATE);
  22767. }
  22768. }
  22769. _SG_TRACE_ARGS(uninit_shader, shd_id);
  22770. }
  22771. SOKOL_API_IMPL void sg_uninit_pipeline(sg_pipeline pip_id) {
  22772. SOKOL_ASSERT(_sg.valid);
  22773. _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
  22774. if (pip) {
  22775. if ((pip->slot.state == SG_RESOURCESTATE_VALID) || (pip->slot.state == SG_RESOURCESTATE_FAILED)) {
  22776. _sg_uninit_pipeline(pip);
  22777. SOKOL_ASSERT(pip->slot.state == SG_RESOURCESTATE_ALLOC);
  22778. } else if (pip->slot.state != SG_RESOURCESTATE_ALLOC) {
  22779. _SG_ERROR(UNINIT_PIPELINE_INVALID_STATE);
  22780. }
  22781. }
  22782. _SG_TRACE_ARGS(uninit_pipeline, pip_id);
  22783. }
  22784. SOKOL_API_IMPL void sg_uninit_view(sg_view view_id) {
  22785. SOKOL_ASSERT(_sg.valid);
  22786. _sg_view_t* view = _sg_lookup_view(view_id.id);
  22787. if (view) {
  22788. if ((view->slot.state == SG_RESOURCESTATE_VALID) || (view->slot.state == SG_RESOURCESTATE_FAILED)) {
  22789. _sg_uninit_view(view);
  22790. SOKOL_ASSERT(view->slot.state == SG_RESOURCESTATE_ALLOC);
  22791. } else if (view->slot.state != SG_RESOURCESTATE_ALLOC) {
  22792. _SG_ERROR(UNINIT_VIEW_INVALID_STATE);
  22793. }
  22794. }
  22795. _SG_TRACE_ARGS(uninit_view, view_id);
  22796. }
  22797. SOKOL_API_IMPL void sg_fail_buffer(sg_buffer buf_id) {
  22798. SOKOL_ASSERT(_sg.valid);
  22799. _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
  22800. if (buf) {
  22801. if (buf->slot.state == SG_RESOURCESTATE_ALLOC) {
  22802. buf->slot.state = SG_RESOURCESTATE_FAILED;
  22803. } else {
  22804. _SG_ERROR(FAIL_BUFFER_INVALID_STATE);
  22805. }
  22806. }
  22807. _SG_TRACE_ARGS(fail_buffer, buf_id);
  22808. }
  22809. SOKOL_API_IMPL void sg_fail_image(sg_image img_id) {
  22810. SOKOL_ASSERT(_sg.valid);
  22811. _sg_image_t* img = _sg_lookup_image(img_id.id);
  22812. if (img) {
  22813. if (img->slot.state == SG_RESOURCESTATE_ALLOC) {
  22814. img->slot.state = SG_RESOURCESTATE_FAILED;
  22815. } else {
  22816. _SG_ERROR(FAIL_IMAGE_INVALID_STATE);
  22817. }
  22818. }
  22819. _SG_TRACE_ARGS(fail_image, img_id);
  22820. }
  22821. SOKOL_API_IMPL void sg_fail_sampler(sg_sampler smp_id) {
  22822. SOKOL_ASSERT(_sg.valid);
  22823. _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
  22824. if (smp) {
  22825. if (smp->slot.state == SG_RESOURCESTATE_ALLOC) {
  22826. smp->slot.state = SG_RESOURCESTATE_FAILED;
  22827. } else {
  22828. _SG_ERROR(FAIL_SAMPLER_INVALID_STATE);
  22829. }
  22830. }
  22831. _SG_TRACE_ARGS(fail_sampler, smp_id);
  22832. }
  22833. SOKOL_API_IMPL void sg_fail_shader(sg_shader shd_id) {
  22834. SOKOL_ASSERT(_sg.valid);
  22835. _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
  22836. if (shd) {
  22837. if (shd->slot.state == SG_RESOURCESTATE_ALLOC) {
  22838. shd->slot.state = SG_RESOURCESTATE_FAILED;
  22839. } else {
  22840. _SG_ERROR(FAIL_SHADER_INVALID_STATE);
  22841. }
  22842. }
  22843. _SG_TRACE_ARGS(fail_shader, shd_id);
  22844. }
  22845. SOKOL_API_IMPL void sg_fail_pipeline(sg_pipeline pip_id) {
  22846. SOKOL_ASSERT(_sg.valid);
  22847. _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
  22848. if (pip) {
  22849. if (pip->slot.state == SG_RESOURCESTATE_ALLOC) {
  22850. pip->slot.state = SG_RESOURCESTATE_FAILED;
  22851. } else {
  22852. _SG_ERROR(FAIL_PIPELINE_INVALID_STATE);
  22853. }
  22854. }
  22855. _SG_TRACE_ARGS(fail_pipeline, pip_id);
  22856. }
  22857. SOKOL_API_IMPL void sg_fail_view(sg_view view_id) {
  22858. SOKOL_ASSERT(_sg.valid);
  22859. _sg_view_t* view = _sg_lookup_view(view_id.id);
  22860. if (view) {
  22861. if (view->slot.state == SG_RESOURCESTATE_ALLOC) {
  22862. view->slot.state = SG_RESOURCESTATE_FAILED;
  22863. } else {
  22864. _SG_ERROR(FAIL_VIEW_INVALID_STATE);
  22865. }
  22866. }
  22867. _SG_TRACE_ARGS(fail_view, view_id);
  22868. }
  22869. SOKOL_API_IMPL sg_resource_state sg_query_buffer_state(sg_buffer buf_id) {
  22870. SOKOL_ASSERT(_sg.valid);
  22871. _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
  22872. sg_resource_state res = buf ? buf->slot.state : SG_RESOURCESTATE_INVALID;
  22873. return res;
  22874. }
  22875. SOKOL_API_IMPL sg_resource_state sg_query_image_state(sg_image img_id) {
  22876. SOKOL_ASSERT(_sg.valid);
  22877. _sg_image_t* img = _sg_lookup_image(img_id.id);
  22878. sg_resource_state res = img ? img->slot.state : SG_RESOURCESTATE_INVALID;
  22879. return res;
  22880. }
  22881. SOKOL_API_IMPL sg_resource_state sg_query_sampler_state(sg_sampler smp_id) {
  22882. SOKOL_ASSERT(_sg.valid);
  22883. _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
  22884. sg_resource_state res = smp ? smp->slot.state : SG_RESOURCESTATE_INVALID;
  22885. return res;
  22886. }
  22887. SOKOL_API_IMPL sg_resource_state sg_query_shader_state(sg_shader shd_id) {
  22888. SOKOL_ASSERT(_sg.valid);
  22889. _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
  22890. sg_resource_state res = shd ? shd->slot.state : SG_RESOURCESTATE_INVALID;
  22891. return res;
  22892. }
  22893. SOKOL_API_IMPL sg_resource_state sg_query_pipeline_state(sg_pipeline pip_id) {
  22894. SOKOL_ASSERT(_sg.valid);
  22895. _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
  22896. sg_resource_state res = pip ? pip->slot.state : SG_RESOURCESTATE_INVALID;
  22897. return res;
  22898. }
  22899. SOKOL_API_IMPL sg_resource_state sg_query_view_state(sg_view view_id) {
  22900. SOKOL_ASSERT(_sg.valid);
  22901. _sg_view_t* view = _sg_lookup_view(view_id.id);
  22902. sg_resource_state res = view ? view->slot.state : SG_RESOURCESTATE_INVALID;
  22903. return res;
  22904. }
  22905. SOKOL_API_IMPL sg_buffer sg_make_buffer(const sg_buffer_desc* desc) {
  22906. SOKOL_ASSERT(_sg.valid);
  22907. SOKOL_ASSERT(desc);
  22908. sg_buffer_desc desc_def = _sg_buffer_desc_defaults(desc);
  22909. sg_buffer buf_id = _sg_alloc_buffer();
  22910. if (buf_id.id != SG_INVALID_ID) {
  22911. _sg_buffer_t* buf = _sg_buffer_at(buf_id.id);
  22912. SOKOL_ASSERT(buf && (buf->slot.state == SG_RESOURCESTATE_ALLOC));
  22913. _sg_init_buffer(buf, &desc_def);
  22914. SOKOL_ASSERT((buf->slot.state == SG_RESOURCESTATE_VALID) || (buf->slot.state == SG_RESOURCESTATE_FAILED));
  22915. }
  22916. _SG_TRACE_ARGS(make_buffer, &desc_def, buf_id);
  22917. return buf_id;
  22918. }
  22919. SOKOL_API_IMPL sg_image sg_make_image(const sg_image_desc* desc) {
  22920. SOKOL_ASSERT(_sg.valid);
  22921. SOKOL_ASSERT(desc);
  22922. sg_image_desc desc_def = _sg_image_desc_defaults(desc);
  22923. sg_image img_id = _sg_alloc_image();
  22924. if (img_id.id != SG_INVALID_ID) {
  22925. _sg_image_t* img = _sg_image_at(img_id.id);
  22926. SOKOL_ASSERT(img && (img->slot.state == SG_RESOURCESTATE_ALLOC));
  22927. _sg_init_image(img, &desc_def);
  22928. SOKOL_ASSERT((img->slot.state == SG_RESOURCESTATE_VALID) || (img->slot.state == SG_RESOURCESTATE_FAILED));
  22929. }
  22930. _SG_TRACE_ARGS(make_image, &desc_def, img_id);
  22931. return img_id;
  22932. }
  22933. SOKOL_API_IMPL sg_sampler sg_make_sampler(const sg_sampler_desc* desc) {
  22934. SOKOL_ASSERT(_sg.valid);
  22935. SOKOL_ASSERT(desc);
  22936. sg_sampler_desc desc_def = _sg_sampler_desc_defaults(desc);
  22937. sg_sampler smp_id = _sg_alloc_sampler();
  22938. if (smp_id.id != SG_INVALID_ID) {
  22939. _sg_sampler_t* smp = _sg_sampler_at(smp_id.id);
  22940. SOKOL_ASSERT(smp && (smp->slot.state == SG_RESOURCESTATE_ALLOC));
  22941. _sg_init_sampler(smp, &desc_def);
  22942. SOKOL_ASSERT((smp->slot.state == SG_RESOURCESTATE_VALID) || (smp->slot.state == SG_RESOURCESTATE_FAILED));
  22943. }
  22944. _SG_TRACE_ARGS(make_sampler, &desc_def, smp_id);
  22945. return smp_id;
  22946. }
  22947. SOKOL_API_IMPL sg_shader sg_make_shader(const sg_shader_desc* desc) {
  22948. SOKOL_ASSERT(_sg.valid);
  22949. SOKOL_ASSERT(desc);
  22950. sg_shader_desc desc_def = _sg_shader_desc_defaults(desc);
  22951. sg_shader shd_id = _sg_alloc_shader();
  22952. if (shd_id.id != SG_INVALID_ID) {
  22953. _sg_shader_t* shd = _sg_shader_at(shd_id.id);
  22954. SOKOL_ASSERT(shd && (shd->slot.state == SG_RESOURCESTATE_ALLOC));
  22955. _sg_init_shader(shd, &desc_def);
  22956. SOKOL_ASSERT((shd->slot.state == SG_RESOURCESTATE_VALID) || (shd->slot.state == SG_RESOURCESTATE_FAILED));
  22957. }
  22958. _SG_TRACE_ARGS(make_shader, &desc_def, shd_id);
  22959. return shd_id;
  22960. }
  22961. SOKOL_API_IMPL sg_pipeline sg_make_pipeline(const sg_pipeline_desc* desc) {
  22962. SOKOL_ASSERT(_sg.valid);
  22963. SOKOL_ASSERT(desc);
  22964. sg_pipeline_desc desc_def = _sg_pipeline_desc_defaults(desc);
  22965. sg_pipeline pip_id = _sg_alloc_pipeline();
  22966. if (pip_id.id != SG_INVALID_ID) {
  22967. _sg_pipeline_t* pip = _sg_pipeline_at(pip_id.id);
  22968. SOKOL_ASSERT(pip && (pip->slot.state == SG_RESOURCESTATE_ALLOC));
  22969. _sg_init_pipeline(pip, &desc_def);
  22970. SOKOL_ASSERT((pip->slot.state == SG_RESOURCESTATE_VALID) || (pip->slot.state == SG_RESOURCESTATE_FAILED));
  22971. }
  22972. _SG_TRACE_ARGS(make_pipeline, &desc_def, pip_id);
  22973. return pip_id;
  22974. }
  22975. SOKOL_API_IMPL sg_view sg_make_view(const sg_view_desc* desc) {
  22976. SOKOL_ASSERT(_sg.valid);
  22977. SOKOL_ASSERT(desc);
  22978. sg_view_desc desc_def = _sg_view_desc_defaults(desc);
  22979. sg_view view_id = _sg_alloc_view();
  22980. if (view_id.id != SG_INVALID_ID) {
  22981. _sg_view_t* view = _sg_view_at(view_id.id);
  22982. SOKOL_ASSERT(view && (view->slot.state == SG_RESOURCESTATE_ALLOC));
  22983. _sg_init_view(view, &desc_def);
  22984. SOKOL_ASSERT((view->slot.state == SG_RESOURCESTATE_VALID) || (view->slot.state == SG_RESOURCESTATE_FAILED));
  22985. }
  22986. _SG_TRACE_ARGS(make_view, &desc_def, view_id);
  22987. return view_id;
  22988. }
  22989. SOKOL_API_IMPL void sg_destroy_buffer(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_TRACE_ARGS(destroy_buffer, buf_id);
    _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        if ((buf->slot.state == SG_RESOURCESTATE_VALID) || (buf->slot.state == SG_RESOURCESTATE_FAILED)) {
            _sg_uninit_buffer(buf);
            SOKOL_ASSERT(buf->slot.state == SG_RESOURCESTATE_ALLOC);
        }
        if (buf->slot.state == SG_RESOURCESTATE_ALLOC) {
            _sg_dealloc_buffer(buf);
            SOKOL_ASSERT(buf->slot.state == SG_RESOURCESTATE_INITIAL);
        }
    }
}

SOKOL_API_IMPL void sg_destroy_image(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_TRACE_ARGS(destroy_image, img_id);
    _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        if ((img->slot.state == SG_RESOURCESTATE_VALID) || (img->slot.state == SG_RESOURCESTATE_FAILED)) {
            _sg_uninit_image(img);
            SOKOL_ASSERT(img->slot.state == SG_RESOURCESTATE_ALLOC);
        }
        if (img->slot.state == SG_RESOURCESTATE_ALLOC) {
            _sg_dealloc_image(img);
            SOKOL_ASSERT(img->slot.state == SG_RESOURCESTATE_INITIAL);
        }
    }
}

SOKOL_API_IMPL void sg_destroy_sampler(sg_sampler smp_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_TRACE_ARGS(destroy_sampler, smp_id);
    _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
    if (smp) {
        if ((smp->slot.state == SG_RESOURCESTATE_VALID) || (smp->slot.state == SG_RESOURCESTATE_FAILED)) {
            _sg_uninit_sampler(smp);
            SOKOL_ASSERT(smp->slot.state == SG_RESOURCESTATE_ALLOC);
        }
        if (smp->slot.state == SG_RESOURCESTATE_ALLOC) {
            _sg_dealloc_sampler(smp);
            SOKOL_ASSERT(smp->slot.state == SG_RESOURCESTATE_INITIAL);
        }
    }
}

SOKOL_API_IMPL void sg_destroy_shader(sg_shader shd_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_TRACE_ARGS(destroy_shader, shd_id);
    _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
    if (shd) {
        if ((shd->slot.state == SG_RESOURCESTATE_VALID) || (shd->slot.state == SG_RESOURCESTATE_FAILED)) {
            _sg_uninit_shader(shd);
            SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_ALLOC);
        }
        if (shd->slot.state == SG_RESOURCESTATE_ALLOC) {
            _sg_dealloc_shader(shd);
            SOKOL_ASSERT(shd->slot.state == SG_RESOURCESTATE_INITIAL);
        }
    }
}

SOKOL_API_IMPL void sg_destroy_pipeline(sg_pipeline pip_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_TRACE_ARGS(destroy_pipeline, pip_id);
    _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
    if (pip) {
        if ((pip->slot.state == SG_RESOURCESTATE_VALID) || (pip->slot.state == SG_RESOURCESTATE_FAILED)) {
            _sg_uninit_pipeline(pip);
            SOKOL_ASSERT(pip->slot.state == SG_RESOURCESTATE_ALLOC);
        }
        if (pip->slot.state == SG_RESOURCESTATE_ALLOC) {
            _sg_dealloc_pipeline(pip);
            SOKOL_ASSERT(pip->slot.state == SG_RESOURCESTATE_INITIAL);
        }
    }
}

SOKOL_API_IMPL void sg_destroy_view(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_TRACE_ARGS(destroy_view, view_id);
    _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        if ((view->slot.state == SG_RESOURCESTATE_VALID) || (view->slot.state == SG_RESOURCESTATE_FAILED)) {
            _sg_uninit_view(view);
            SOKOL_ASSERT(view->slot.state == SG_RESOURCESTATE_ALLOC);
        }
        if (view->slot.state == SG_RESOURCESTATE_ALLOC) {
            _sg_dealloc_view(view);
            SOKOL_ASSERT(view->slot.state == SG_RESOURCESTATE_INITIAL);
        }
    }
}
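
// The destroy functions above all walk the same resource state machine:
// VALID or FAILED -> uninit -> ALLOC -> dealloc -> INITIAL, so a destroy
// call is safe in any resource state. A hedged usage sketch (not part of
// the library, the desc parameters are illustrative):
//
//      sg_buffer buf = sg_make_buffer(&(sg_buffer_desc){
//          .usage.stream_update = true,
//          .size = 1024,
//      });
//      // ...
//      sg_destroy_buffer(buf);     // ok for valid, failed or dangling ids
//      sg_destroy_buffer(buf);     // second destroy is a no-op (lookup fails)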

SOKOL_API_IMPL void sg_begin_pass(const sg_pass* pass) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT(!_sg.cur_pass.valid);
    SOKOL_ASSERT(!_sg.cur_pass.in_pass);
    SOKOL_ASSERT(_sg_attachments_empty(&_sg.cur_pass.atts));
    SOKOL_ASSERT(pass);
    SOKOL_ASSERT((pass->_start_canary == 0) && (pass->_end_canary == 0));
    _sg.cur_pass.in_pass = true;
    const sg_pass pass_def = _sg_pass_defaults(pass);
    if (!_sg_validate_pass_attachment_limits(&pass_def)) {
        return;
    }
    if (!_sg_validate_begin_pass(&pass_def)) {
        return;
    }
    const _sg_attachments_ptrs_t atts_ptrs = _sg_attachments_ptrs(&pass_def.attachments);
    if (!atts_ptrs.empty) {
        if (!_sg_attachments_alive(&atts_ptrs)) {
            _SG_ERROR(BEGINPASS_ATTACHMENTS_ALIVE);
            return;
        }
        _sg.cur_pass.atts = pass->attachments;
        _sg.cur_pass.dim = _sg_attachments_dim(&atts_ptrs);
    } else if (!pass_def.compute) {
        // a swapchain pass
        SOKOL_ASSERT(pass_def.swapchain.width > 0);
        SOKOL_ASSERT(pass_def.swapchain.height > 0);
        SOKOL_ASSERT(pass_def.swapchain.color_format > SG_PIXELFORMAT_NONE);
        SOKOL_ASSERT(pass_def.swapchain.sample_count > 0);
        _sg.cur_pass.dim.width = pass_def.swapchain.width;
        _sg.cur_pass.dim.height = pass_def.swapchain.height;
        _sg.cur_pass.swapchain.color_fmt = pass_def.swapchain.color_format;
        _sg.cur_pass.swapchain.depth_fmt = pass_def.swapchain.depth_format;
        _sg.cur_pass.swapchain.sample_count = pass_def.swapchain.sample_count;
    }
    _sg.cur_pass.action = pass_def.action;
    _sg.cur_pass.valid = true;  // may be overruled by backend begin-pass functions
    _sg.cur_pass.is_compute = pass_def.compute;
    _sg_begin_pass(&pass_def, &atts_ptrs);
    _SG_TRACE_ARGS(begin_pass, &pass_def);
}
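
// A hedged sketch of the two render-pass flavors handled by sg_begin_pass()
// above (not part of the library; in a real app the swapchain description
// would come from the window system glue, and 'color_view' is an
// illustrative sg_view handle):
//
//      // swapchain pass: no attachments, swapchain description required
//      sg_begin_pass(&(sg_pass){
//          .action.colors[0].load_action = SG_LOADACTION_CLEAR,
//          .swapchain = {
//              .width = 640, .height = 480,
//              .color_format = SG_PIXELFORMAT_RGBA8,
//              .sample_count = 1,
//          },
//      });
//
//      // offscreen pass: attachment views instead of a swapchain
//      sg_begin_pass(&(sg_pass){
//          .attachments.colors[0] = color_view,
//      });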

SOKOL_API_IMPL void sg_apply_viewport(int x, int y, int width, int height, bool origin_top_left) {
    SOKOL_ASSERT(_sg.valid);
    #if defined(SOKOL_DEBUG)
    if (!_sg_validate_apply_viewport(x, y, width, height, origin_top_left)) {
        return;
    }
    #endif
    _sg_stats_inc(num_apply_viewport);
    if (!_sg.cur_pass.valid) {
        return;
    }
    _sg_apply_viewport(x, y, width, height, origin_top_left);
    _SG_TRACE_ARGS(apply_viewport, x, y, width, height, origin_top_left);
}

SOKOL_API_IMPL void sg_apply_viewportf(float x, float y, float width, float height, bool origin_top_left) {
    sg_apply_viewport((int)x, (int)y, (int)width, (int)height, origin_top_left);
}

SOKOL_API_IMPL void sg_apply_scissor_rect(int x, int y, int width, int height, bool origin_top_left) {
    SOKOL_ASSERT(_sg.valid);
    #if defined(SOKOL_DEBUG)
    if (!_sg_validate_apply_scissor_rect(x, y, width, height, origin_top_left)) {
        return;
    }
    #endif
    _sg_stats_inc(num_apply_scissor_rect);
    if (!_sg.cur_pass.valid) {
        return;
    }
    _sg_apply_scissor_rect(x, y, width, height, origin_top_left);
    _SG_TRACE_ARGS(apply_scissor_rect, x, y, width, height, origin_top_left);
}

SOKOL_API_IMPL void sg_apply_scissor_rectf(float x, float y, float width, float height, bool origin_top_left) {
    sg_apply_scissor_rect((int)x, (int)y, (int)width, (int)height, origin_top_left);
}
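
// Viewport and scissor rect only apply to the currently active pass and are
// reset to the full pass dimensions at the start of the next pass. A hedged
// usage sketch (not part of the library):
//
//      sg_apply_viewport(0, 0, 640, 480, true);        // origin at top-left
//      sg_apply_scissor_rect(16, 16, 608, 448, true);  // clip to inset rect
//
// The ...f() wrappers above simply truncate their float arguments to int.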

SOKOL_API_IMPL void sg_apply_pipeline(sg_pipeline pip_id) {
    SOKOL_ASSERT(_sg.valid);
    _sg_stats_inc(num_apply_pipeline);
    if (!_sg_validate_apply_pipeline(pip_id)) {
        _sg.next_draw_valid = false;
        return;
    }
    if (!_sg.cur_pass.valid) {
        return;
    }
    _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
    SOKOL_ASSERT(pip);
    _sg.cur_pip = _sg_pipeline_ref(pip);
    _sg.next_draw_valid = (SG_RESOURCESTATE_VALID == pip->slot.state);
    if (!_sg.next_draw_valid) {
        return;
    }
    _sg.use_indexed_draw = pip->cmn.index_type != SG_INDEXTYPE_NONE;
    _sg.use_instanced_draw = pip->cmn.use_instanced_draw;
    _sg_apply_pipeline(pip);
    // set the expected bindings and uniform block flags
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&pip->cmn.shader);
    _sg.required_bindings_and_uniforms = pip->cmn.required_bindings_and_uniforms | shd->cmn.required_bindings_and_uniforms;
    _sg.applied_bindings_and_uniforms = 0;
    _SG_TRACE_ARGS(apply_pipeline, pip_id);
}

SOKOL_API_IMPL void sg_apply_bindings(const sg_bindings* bindings) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT(bindings);
    _sg_stats_inc(num_apply_bindings);
    _sg.applied_bindings_and_uniforms |= (1 << SG_MAX_UNIFORMBLOCK_BINDSLOTS);
    if (!_sg_validate_apply_bindings(bindings)) {
        _sg.next_draw_valid = false;
    }
    SOKOL_ASSERT((bindings->_start_canary == 0) && (bindings->_end_canary == 0));
    if (!_sg_pipeline_ref_alive(&_sg.cur_pip)) {
        _sg.next_draw_valid = false;
    }
    if (!_sg.cur_pass.valid) {
        return;
    }
    if (!_sg.next_draw_valid) {
        return;
    }
    _SG_STRUCT(_sg_bindings_ptrs_t, bnd);
    bnd.pip = _sg_pipeline_ref_ptr(&_sg.cur_pip);
    const _sg_shader_t* shd = _sg_shader_ref_ptr(&bnd.pip->cmn.shader);
    if (!_sg.cur_pass.is_compute) {
        for (size_t i = 0; i < SG_MAX_VERTEXBUFFER_BINDSLOTS; i++) {
            if (bnd.pip->cmn.vertex_buffer_layout_active[i]) {
                SOKOL_ASSERT(bindings->vertex_buffers[i].id != SG_INVALID_ID);
                bnd.vbs[i] = _sg_lookup_buffer(bindings->vertex_buffers[i].id);
                bnd.vb_offsets[i] = bindings->vertex_buffer_offsets[i];
                _sg.next_draw_valid &= bnd.vbs[i] && (SG_RESOURCESTATE_VALID == bnd.vbs[i]->slot.state);
            }
        }
        if (bindings->index_buffer.id) {
            bnd.ib = _sg_lookup_buffer(bindings->index_buffer.id);
            bnd.ib_offset = bindings->index_buffer_offset;
            _sg.next_draw_valid &= bnd.ib && (SG_RESOURCESTATE_VALID == bnd.ib->slot.state);
        }
    }
    for (int i = 0; i < SG_MAX_VIEW_BINDSLOTS; i++) {
        if (shd->cmn.views[i].view_type != SG_VIEWTYPE_INVALID) {
            SOKOL_ASSERT(bindings->views[i].id != SG_INVALID_ID);
            bnd.views[i] = _sg_lookup_view(bindings->views[i].id);
            if (bnd.views[i]) {
                if (bnd.views[i]->cmn.type == SG_VIEWTYPE_STORAGEBUFFER) {
                    _sg.next_draw_valid &= _sg_buffer_ref_valid(&bnd.views[i]->cmn.buf.ref);
                } else {
                    _sg.next_draw_valid &= _sg_image_ref_valid(&bnd.views[i]->cmn.img.ref);
                }
            } else {
                _sg.next_draw_valid = false;
            }
        }
    }
    for (size_t i = 0; i < SG_MAX_SAMPLER_BINDSLOTS; i++) {
        if (shd->cmn.samplers[i].stage != SG_SHADERSTAGE_NONE) {
            SOKOL_ASSERT(bindings->samplers[i].id != SG_INVALID_ID);
            bnd.smps[i] = _sg_lookup_sampler(bindings->samplers[i].id);
            // a failed sampler lookup must invalidate the next draw instead of
            // only asserting, matching the buffer and view lookups above
            _sg.next_draw_valid &= bnd.smps[i] && (SG_RESOURCESTATE_VALID == bnd.smps[i]->slot.state);
        }
    }
    if (_sg.next_draw_valid) {
        _sg.next_draw_valid &= _sg_apply_bindings(&bnd);
        _SG_TRACE_ARGS(apply_bindings, bindings);
    }
}
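
// A worked example of the bindings-and-uniforms bitmask maintained by
// sg_apply_pipeline(), sg_apply_bindings() and sg_apply_uniforms(): bits
// [0..SG_MAX_UNIFORMBLOCK_BINDSLOTS-1] track the uniform block slots, and
// bit SG_MAX_UNIFORMBLOCK_BINDSLOTS tracks 'bindings applied'. Assuming
// SG_MAX_UNIFORMBLOCK_BINDSLOTS == 8, a pipeline/shader pair that expects
// resource bindings plus uniform blocks 0 and 2 yields:
//
//      required_bindings_and_uniforms = (1 << 8) | (1 << 2) | (1 << 0)  // 0x105
//
// sg_apply_bindings() ORs in (1 << 8), sg_apply_uniforms(2, ...) ORs in
// (1 << 2), and the draw validation can then compare the applied mask
// against the required mask.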

SOKOL_API_IMPL void sg_apply_uniforms(int ub_slot, const sg_range* data) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT((ub_slot >= 0) && (ub_slot < SG_MAX_UNIFORMBLOCK_BINDSLOTS));
    SOKOL_ASSERT(data && data->ptr && (data->size > 0));
    _sg_stats_inc(num_apply_uniforms);
    _sg_stats_add(size_apply_uniforms, (uint32_t)data->size);
    _sg.applied_bindings_and_uniforms |= 1 << ub_slot;
    if (!_sg_validate_apply_uniforms(ub_slot, data)) {
        _sg.next_draw_valid = false;
        return;
    }
    if (!_sg.cur_pass.valid) {
        return;
    }
    if (!_sg.next_draw_valid) {
        return;
    }
    _sg_apply_uniforms(ub_slot, data);
    _SG_TRACE_ARGS(apply_uniforms, ub_slot, data);
}

_SOKOL_PRIVATE bool _sg_check_skip_draw(int num_elements, int num_instances) {
    if (!_sg.cur_pass.valid) {
        return true;
    }
    if (!_sg.next_draw_valid) {
        return true;
    }
    // skip no-op draws
    if ((0 == num_elements) || (0 == num_instances)) {
        return true;
    }
    return false;
}

SOKOL_API_IMPL void sg_draw(int base_element, int num_elements, int num_instances) {
    SOKOL_ASSERT(_sg.valid);
    #if defined(SOKOL_DEBUG)
    if (!_sg_validate_draw(base_element, num_elements, num_instances)) {
        return;
    }
    #endif
    _sg_stats_inc(num_draw);
    if (_sg_check_skip_draw(num_elements, num_instances)) {
        return;
    }
    _sg_draw(base_element, num_elements, num_instances, 0, 0);
    _SG_TRACE_ARGS(draw, base_element, num_elements, num_instances);
}

SOKOL_API_IMPL void sg_draw_ex(int base_element, int num_elements, int num_instances, int base_vertex, int base_instance) {
    SOKOL_ASSERT(_sg.valid);
    #if defined(SOKOL_DEBUG)
    if (!_sg_validate_draw_ex(base_element, num_elements, num_instances, base_vertex, base_instance)) {
        return;
    }
    #endif
    _sg_stats_inc(num_draw_ex);
    if (_sg_check_skip_draw(num_elements, num_instances)) {
        return;
    }
    _sg_draw(base_element, num_elements, num_instances, base_vertex, base_instance);
    _SG_TRACE_ARGS(draw_ex, base_element, num_elements, num_instances, base_vertex, base_instance);
}

SOKOL_API_IMPL void sg_dispatch(int num_groups_x, int num_groups_y, int num_groups_z) {
    SOKOL_ASSERT(_sg.valid);
    #if defined(SOKOL_DEBUG)
    if (!_sg_validate_dispatch(num_groups_x, num_groups_y, num_groups_z)) {
        return;
    }
    #endif
    _sg_stats_inc(num_dispatch);
    if (!_sg.cur_pass.valid) {
        return;
    }
    if (!_sg.next_draw_valid) {
        return;
    }
    // skip no-op dispatches
    if ((0 == num_groups_x) || (0 == num_groups_y) || (0 == num_groups_z)) {
        return;
    }
    _sg_dispatch(num_groups_x, num_groups_y, num_groups_z);
    _SG_TRACE_ARGS(dispatch, num_groups_x, num_groups_y, num_groups_z);
}

SOKOL_API_IMPL void sg_end_pass(void) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT(_sg.cur_pass.in_pass);
    _sg_stats_inc(num_passes);
    // NOTE: don't exit early if !_sg.cur_pass.valid
    const _sg_attachments_ptrs_t atts_ptrs = _sg_attachments_ptrs(&_sg.cur_pass.atts);
    _sg_end_pass(&atts_ptrs);
    _sg.cur_pip = _sg_pipeline_ref(0);
    _sg_clear(&_sg.cur_pass, sizeof(_sg.cur_pass));
    _SG_TRACE_NOARGS(end_pass);
}

SOKOL_API_IMPL void sg_commit(void) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT(!_sg.cur_pass.valid);
    SOKOL_ASSERT(!_sg.cur_pass.in_pass);
    _sg_commit();
    _sg_update_stats();
    _sg_notify_commit_listeners();
    _SG_TRACE_NOARGS(commit);
    _sg.frame_index++;
}
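
// A hedged sketch of how the pass, apply, draw and commit functions above
// compose into one frame (not part of the library; 'swapchain', 'pip',
// 'bind' and 'params' are illustrative):
//
//      sg_begin_pass(&(sg_pass){ .swapchain = swapchain });
//      sg_apply_pipeline(pip);
//      sg_apply_bindings(&bind);
//      sg_apply_uniforms(0, &SG_RANGE(params));
//      sg_draw(0, 6, 1);
//      sg_end_pass();
//      sg_commit();    // bumps _sg.frame_index exactly once per frame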

SOKOL_API_IMPL void sg_reset_state_cache(void) {
    SOKOL_ASSERT(_sg.valid);
    _sg_reset_state_cache();
    _SG_TRACE_NOARGS(reset_state_cache);
}

SOKOL_API_IMPL void sg_update_buffer(sg_buffer buf_id, const sg_range* data) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT(data && data->ptr && (data->size > 0));
    _sg_stats_inc(num_update_buffer);
    _sg_stats_add(size_update_buffer, (uint32_t)data->size);
    _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if ((data->size > 0) && buf && (buf->slot.state == SG_RESOURCESTATE_VALID)) {
        if (_sg_validate_update_buffer(buf, data)) {
            SOKOL_ASSERT(data->size <= (size_t)buf->cmn.size);
            // only one update allowed per buffer and frame
            SOKOL_ASSERT(buf->cmn.update_frame_index != _sg.frame_index);
            // update and append on same buffer in same frame not allowed
            SOKOL_ASSERT(buf->cmn.append_frame_index != _sg.frame_index);
            _sg_update_buffer(buf, data);
            buf->cmn.update_frame_index = _sg.frame_index;
        }
    }
    _SG_TRACE_ARGS(update_buffer, buf_id, data);
}

SOKOL_API_IMPL int sg_append_buffer(sg_buffer buf_id, const sg_range* data) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT(data && data->ptr);
    _sg_stats_inc(num_append_buffer);
    _sg_stats_add(size_append_buffer, (uint32_t)data->size);
    _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    int result;
    if (buf) {
        // rewind append cursor in a new frame
        if (buf->cmn.append_frame_index != _sg.frame_index) {
            buf->cmn.append_pos = 0;
            buf->cmn.append_overflow = false;
        }
        if (((size_t)buf->cmn.append_pos + data->size) > (size_t)buf->cmn.size) {
            buf->cmn.append_overflow = true;
        }
        const int start_pos = buf->cmn.append_pos;
        // NOTE: the multiple-of-4 requirement for the buffer offset comes
        // from WebGPU, but we want identical behaviour across all backends
        SOKOL_ASSERT(_sg_multiple_u64((uint64_t)start_pos, 4));
        if (buf->slot.state == SG_RESOURCESTATE_VALID) {
            if (_sg_validate_append_buffer(buf, data)) {
                if (!buf->cmn.append_overflow && (data->size > 0)) {
                    // update and append on same buffer in same frame not allowed
                    SOKOL_ASSERT(buf->cmn.update_frame_index != _sg.frame_index);
                    _sg_append_buffer(buf, data, buf->cmn.append_frame_index != _sg.frame_index);
                    buf->cmn.append_pos += (int) _sg_roundup_u64(data->size, 4);
                    buf->cmn.append_frame_index = _sg.frame_index;
                }
            }
        }
        result = start_pos;
    } else {
        // FIXME: should we return -1 here?
        result = 0;
    }
    _SG_TRACE_ARGS(append_buffer, buf_id, data, result);
    return result;
}
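
// As implemented above, sg_append_buffer() returns the pre-append cursor
// position and advances the internal cursor by the data size rounded up to
// a multiple of 4 (the WebGPU offset requirement, applied to all backends).
// A hedged sketch (not part of the library; 'buf' and 'data' are
// illustrative, with a stream-update buffer that is large enough):
//
//      // appending 10 bytes advances the cursor by roundup(10, 4) == 12
//      int off0 = sg_append_buffer(buf, &(sg_range){ .ptr = data, .size = 10 });
//      int off1 = sg_append_buffer(buf, &(sg_range){ .ptr = data, .size = 10 });
//      // off0 == 0, off1 == 12; the offsets then go into sg_bindings:
//      bind.vertex_buffer_offsets[0] = off1;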

SOKOL_API_IMPL bool sg_query_buffer_overflow(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    bool result = buf ? buf->cmn.append_overflow : false;
    return result;
}

SOKOL_API_IMPL bool sg_query_buffer_will_overflow(sg_buffer buf_id, size_t size) {
    SOKOL_ASSERT(_sg.valid);
    _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    bool result = false;
    if (buf) {
        int append_pos = buf->cmn.append_pos;
        // rewind append cursor in a new frame
        if (buf->cmn.append_frame_index != _sg.frame_index) {
            append_pos = 0;
        }
        if ((append_pos + _sg_roundup((int)size, 4)) > buf->cmn.size) {
            result = true;
        }
    }
    return result;
}
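
// A hedged sketch of checking for overflow before appending (not part of the
// library); this mirrors the rounded-size test implemented above:
//
//      if (!sg_query_buffer_will_overflow(buf, sizeof(vertices))) {
//          const int offset = sg_append_buffer(buf, &SG_RANGE(vertices));
//          // ... draw with this offset ...
//      } else {
//          // skip this batch, or allocate a bigger buffer next frame
//      }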

SOKOL_API_IMPL void sg_update_image(sg_image img_id, const sg_image_data* data) {
    SOKOL_ASSERT(_sg.valid);
    _sg_stats_inc(num_update_image);
    for (int mip_index = 0; mip_index < SG_MAX_MIPMAPS; mip_index++) {
        if (data->mip_levels[mip_index].size == 0) {
            break;
        }
        _sg_stats_add(size_update_image, (uint32_t)data->mip_levels[mip_index].size);
    }
    _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img && img->slot.state == SG_RESOURCESTATE_VALID) {
        if (_sg_validate_update_image(img, data)) {
            SOKOL_ASSERT(img->cmn.upd_frame_index != _sg.frame_index);
            _sg_update_image(img, data);
            img->cmn.upd_frame_index = _sg.frame_index;
        }
    }
    _SG_TRACE_ARGS(update_image, img_id, data);
}

SOKOL_API_IMPL void sg_push_debug_group(const char* name) {
    SOKOL_ASSERT(_sg.valid);
    SOKOL_ASSERT(name);
    _sg_push_debug_group(name);
    _SG_TRACE_ARGS(push_debug_group, name);
}

SOKOL_API_IMPL void sg_pop_debug_group(void) {
    SOKOL_ASSERT(_sg.valid);
    _sg_pop_debug_group();
    _SG_TRACE_NOARGS(pop_debug_group);
}

SOKOL_API_IMPL bool sg_add_commit_listener(sg_commit_listener listener) {
    SOKOL_ASSERT(_sg.valid);
    return _sg_add_commit_listener(&listener);
}

SOKOL_API_IMPL bool sg_remove_commit_listener(sg_commit_listener listener) {
    SOKOL_ASSERT(_sg.valid);
    return _sg_remove_commit_listener(&listener);
}

SOKOL_API_IMPL void sg_enable_stats(void) {
    SOKOL_ASSERT(_sg.valid);
    _sg.stats_enabled = true;
}

SOKOL_API_IMPL void sg_disable_stats(void) {
    SOKOL_ASSERT(_sg.valid);
    _sg.stats_enabled = false;
}

SOKOL_API_IMPL bool sg_stats_enabled(void) {
    return _sg.stats_enabled;
}

SOKOL_API_IMPL sg_buffer_info sg_query_buffer_info(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_buffer_info, info);
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        info.slot.state = buf->slot.state;
        info.slot.res_id = buf->slot.id;
        info.slot.uninit_count = buf->slot.uninit_count;
        info.update_frame_index = buf->cmn.update_frame_index;
        info.append_frame_index = buf->cmn.append_frame_index;
        info.append_pos = buf->cmn.append_pos;
        info.append_overflow = buf->cmn.append_overflow;
        #if defined(SOKOL_D3D11)
        info.num_slots = 1;
        info.active_slot = 0;
        #else
        info.num_slots = buf->cmn.num_slots;
        info.active_slot = buf->cmn.active_slot;
        #endif
    }
    return info;
}

SOKOL_API_IMPL sg_image_info sg_query_image_info(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_image_info, info);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        info.slot.state = img->slot.state;
        info.slot.res_id = img->slot.id;
        info.slot.uninit_count = img->slot.uninit_count;
        info.upd_frame_index = img->cmn.upd_frame_index;
        #if defined(SOKOL_D3D11)
        info.num_slots = 1;
        info.active_slot = 0;
        #else
        info.num_slots = img->cmn.num_slots;
        info.active_slot = img->cmn.active_slot;
        #endif
    }
    return info;
}

SOKOL_API_IMPL sg_sampler_info sg_query_sampler_info(sg_sampler smp_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_sampler_info, info);
    const _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
    if (smp) {
        info.slot.state = smp->slot.state;
        info.slot.res_id = smp->slot.id;
        info.slot.uninit_count = smp->slot.uninit_count;
    }
    return info;
}

SOKOL_API_IMPL sg_shader_info sg_query_shader_info(sg_shader shd_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_shader_info, info);
    const _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
    if (shd) {
        info.slot.state = shd->slot.state;
        info.slot.res_id = shd->slot.id;
        info.slot.uninit_count = shd->slot.uninit_count;
    }
    return info;
}

SOKOL_API_IMPL sg_pipeline_info sg_query_pipeline_info(sg_pipeline pip_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_pipeline_info, info);
    const _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
    if (pip) {
        info.slot.state = pip->slot.state;
        info.slot.res_id = pip->slot.id;
        info.slot.uninit_count = pip->slot.uninit_count;
    }
    return info;
}

SOKOL_API_IMPL sg_view_info sg_query_view_info(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_view_info, info);
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        info.slot.state = view->slot.state;
        info.slot.res_id = view->slot.id;
        info.slot.uninit_count = view->slot.uninit_count;
    }
    return info;
}

SOKOL_API_IMPL sg_buffer_desc sg_query_buffer_desc(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_buffer_desc, desc);
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        desc.size = (size_t)buf->cmn.size;
        desc.usage = buf->cmn.usage;
    }
    return desc;
}

SOKOL_API_IMPL size_t sg_query_buffer_size(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        return (size_t)buf->cmn.size;
    }
    return 0;
}

SOKOL_API_IMPL sg_buffer_usage sg_query_buffer_usage(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_buffer_usage, usg);
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        usg = buf->cmn.usage;
    }
    return usg;
}

SOKOL_API_IMPL sg_image_desc sg_query_image_desc(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_image_desc, desc);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        desc.type = img->cmn.type;
        desc.width = img->cmn.width;
        desc.height = img->cmn.height;
        desc.num_slices = img->cmn.num_slices;
        desc.num_mipmaps = img->cmn.num_mipmaps;
        desc.usage = img->cmn.usage;
        desc.pixel_format = img->cmn.pixel_format;
        desc.sample_count = img->cmn.sample_count;
    }
    return desc;
}

SOKOL_API_IMPL sg_image_type sg_query_image_type(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        return img->cmn.type;
    }
    return _SG_IMAGETYPE_DEFAULT;
}

SOKOL_API_IMPL int sg_query_image_width(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        return img->cmn.width;
    }
    return 0;
}

SOKOL_API_IMPL int sg_query_image_height(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        return img->cmn.height;
    }
    return 0;
}

SOKOL_API_IMPL int sg_query_image_num_slices(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        return img->cmn.num_slices;
    }
    return 0;
}

SOKOL_API_IMPL int sg_query_image_num_mipmaps(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        return img->cmn.num_mipmaps;
    }
    return 0;
}

SOKOL_API_IMPL sg_pixel_format sg_query_image_pixelformat(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        return img->cmn.pixel_format;
    }
    return _SG_PIXELFORMAT_DEFAULT;
}

SOKOL_API_IMPL sg_image_usage sg_query_image_usage(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_image_usage, usg);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        usg = img->cmn.usage;
    }
    return usg;
}

SOKOL_API_IMPL int sg_query_image_sample_count(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        return img->cmn.sample_count;
    }
    return 0;
}

SOKOL_API_IMPL sg_view_type sg_query_view_type(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        return view->cmn.type;
    } else {
        return SG_VIEWTYPE_INVALID;
    }
}

// NOTE: may return SG_INVALID_ID if view invalid or view not an image view
SOKOL_API_IMPL sg_image sg_query_view_image(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_image, img);
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        img.id = view->cmn.img.ref.sref.id;
    }
    return img;
}

// NOTE: may return SG_INVALID_ID if view invalid or view not a buffer view
SOKOL_API_IMPL sg_buffer sg_query_view_buffer(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_buffer, buf);
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        buf.id = view->cmn.buf.ref.sref.id;
    }
    return buf;
}

SOKOL_API_IMPL sg_sampler_desc sg_query_sampler_desc(sg_sampler smp_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_sampler_desc, desc);
    const _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
    if (smp) {
        desc.min_filter = smp->cmn.min_filter;
        desc.mag_filter = smp->cmn.mag_filter;
        desc.mipmap_filter = smp->cmn.mipmap_filter;
        desc.wrap_u = smp->cmn.wrap_u;
        desc.wrap_v = smp->cmn.wrap_v;
        desc.wrap_w = smp->cmn.wrap_w;
        desc.min_lod = smp->cmn.min_lod;
        desc.max_lod = smp->cmn.max_lod;
        desc.border_color = smp->cmn.border_color;
        desc.compare = smp->cmn.compare;
        desc.max_anisotropy = smp->cmn.max_anisotropy;
    }
    return desc;
}

SOKOL_API_IMPL sg_shader_desc sg_query_shader_desc(sg_shader shd_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_shader_desc, desc);
    const _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
    if (shd) {
        for (size_t ub_idx = 0; ub_idx < SG_MAX_UNIFORMBLOCK_BINDSLOTS; ub_idx++) {
            sg_shader_uniform_block* ub_desc = &desc.uniform_blocks[ub_idx];
            const _sg_shader_uniform_block_t* ub = &shd->cmn.uniform_blocks[ub_idx];
            ub_desc->stage = ub->stage;
            ub_desc->size = ub->size;
        }
        for (size_t view_idx = 0; view_idx < SG_MAX_VIEW_BINDSLOTS; view_idx++) {
            const _sg_shader_view_t* view = &shd->cmn.views[view_idx];
            if (view->view_type == SG_VIEWTYPE_TEXTURE) {
                sg_shader_texture_view* tex_desc = &desc.views[view_idx].texture;
                tex_desc->stage = view->stage;
                tex_desc->image_type = view->image_type;
                tex_desc->sample_type = view->sample_type;
                tex_desc->multisampled = view->multisampled;
            } else if (view->view_type == SG_VIEWTYPE_STORAGEBUFFER) {
                sg_shader_storage_buffer_view* sbuf_desc = &desc.views[view_idx].storage_buffer;
                sbuf_desc->stage = view->stage;
                sbuf_desc->readonly = view->sbuf_readonly;
            } else if (view->view_type == SG_VIEWTYPE_STORAGEIMAGE) {
                sg_shader_storage_image_view* simg_desc = &desc.views[view_idx].storage_image;
                simg_desc->stage = view->stage;
                simg_desc->access_format = view->access_format;
                simg_desc->image_type = view->image_type;
                simg_desc->writeonly = view->simg_writeonly;
            }
        }
        for (size_t smp_idx = 0; smp_idx < SG_MAX_SAMPLER_BINDSLOTS; smp_idx++) {
            sg_shader_sampler* smp_desc = &desc.samplers[smp_idx];
            const _sg_shader_sampler_t* smp = &shd->cmn.samplers[smp_idx];
            smp_desc->stage = smp->stage;
            smp_desc->sampler_type = smp->sampler_type;
        }
        for (size_t tex_smp_idx = 0; tex_smp_idx < SG_MAX_TEXTURE_SAMPLER_PAIRS; tex_smp_idx++) {
            sg_shader_texture_sampler_pair* tex_smp_desc = &desc.texture_sampler_pairs[tex_smp_idx];
            const _sg_shader_texture_sampler_t* tex_smp = &shd->cmn.texture_samplers[tex_smp_idx];
            tex_smp_desc->stage = tex_smp->stage;
            tex_smp_desc->view_slot = tex_smp->view_slot;
            tex_smp_desc->sampler_slot = tex_smp->sampler_slot;
        }
    }
    return desc;
}

SOKOL_API_IMPL sg_pipeline_desc sg_query_pipeline_desc(sg_pipeline pip_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_pipeline_desc, desc);
    const _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
    if (pip) {
        desc.compute = pip->cmn.is_compute;
        desc.shader.id = pip->cmn.shader.sref.id;
        desc.layout = pip->cmn.layout;
        desc.depth = pip->cmn.depth;
        desc.stencil = pip->cmn.stencil;
        desc.color_count = pip->cmn.color_count;
        for (int i = 0; i < pip->cmn.color_count; i++) {
            desc.colors[i] = pip->cmn.colors[i];
        }
        desc.primitive_type = pip->cmn.primitive_type;
        desc.index_type = pip->cmn.index_type;
        desc.cull_mode = pip->cmn.cull_mode;
        desc.face_winding = pip->cmn.face_winding;
        desc.sample_count = pip->cmn.sample_count;
        desc.blend_color = pip->cmn.blend_color;
        desc.alpha_to_coverage_enabled = pip->cmn.alpha_to_coverage_enabled;
    }
    return desc;
}

SOKOL_API_IMPL sg_view_desc sg_query_view_desc(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_view_desc, desc);
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        switch (view->cmn.type) {
            case SG_VIEWTYPE_STORAGEBUFFER:
                desc.storage_buffer.buffer.id = view->cmn.buf.ref.sref.id;
                desc.storage_buffer.offset = view->cmn.buf.offset;
                break;
            case SG_VIEWTYPE_STORAGEIMAGE:
                desc.storage_image.image.id = view->cmn.img.ref.sref.id;
                desc.storage_image.mip_level = view->cmn.img.mip_level;
                desc.storage_image.slice = view->cmn.img.slice;
                break;
            case SG_VIEWTYPE_TEXTURE:
                desc.texture.image.id = view->cmn.img.ref.sref.id;
                desc.texture.mip_levels.base = view->cmn.img.mip_level;
                desc.texture.mip_levels.count = view->cmn.img.mip_level_count;
                desc.texture.slices.base = view->cmn.img.slice;
                desc.texture.slices.count = view->cmn.img.slice_count;
                break;
            case SG_VIEWTYPE_COLORATTACHMENT:
                desc.color_attachment.image.id = view->cmn.img.ref.sref.id;
                desc.color_attachment.mip_level = view->cmn.img.mip_level;
                desc.color_attachment.slice = view->cmn.img.slice;
                break;
            case SG_VIEWTYPE_RESOLVEATTACHMENT:
                desc.resolve_attachment.image.id = view->cmn.img.ref.sref.id;
                desc.resolve_attachment.mip_level = view->cmn.img.mip_level;
                desc.resolve_attachment.slice = view->cmn.img.slice;
                break;
            case SG_VIEWTYPE_DEPTHSTENCILATTACHMENT:
                desc.depth_stencil_attachment.image.id = view->cmn.img.ref.sref.id;
                desc.depth_stencil_attachment.mip_level = view->cmn.img.mip_level;
                desc.depth_stencil_attachment.slice = view->cmn.img.slice;
                break;
            default:
                SOKOL_UNREACHABLE;
        }
    }
    return desc;
}

SOKOL_API_IMPL sg_buffer_desc sg_query_buffer_defaults(const sg_buffer_desc* desc) {
    SOKOL_ASSERT(_sg.valid && desc);
    return _sg_buffer_desc_defaults(desc);
}

SOKOL_API_IMPL sg_image_desc sg_query_image_defaults(const sg_image_desc* desc) {
    SOKOL_ASSERT(_sg.valid && desc);
    return _sg_image_desc_defaults(desc);
}

SOKOL_API_IMPL sg_sampler_desc sg_query_sampler_defaults(const sg_sampler_desc* desc) {
    SOKOL_ASSERT(_sg.valid && desc);
    return _sg_sampler_desc_defaults(desc);
}

SOKOL_API_IMPL sg_shader_desc sg_query_shader_defaults(const sg_shader_desc* desc) {
    SOKOL_ASSERT(_sg.valid && desc);
    return _sg_shader_desc_defaults(desc);
}

SOKOL_API_IMPL sg_pipeline_desc sg_query_pipeline_defaults(const sg_pipeline_desc* desc) {
    SOKOL_ASSERT(_sg.valid && desc);
    return _sg_pipeline_desc_defaults(desc);
}

SOKOL_API_IMPL sg_view_desc sg_query_view_defaults(const sg_view_desc* desc) {
    SOKOL_ASSERT(_sg.valid && desc);
    return _sg_view_desc_defaults(desc);
}
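
// The sg_query_*_defaults() functions above resolve zero-initialized desc
// fields the same way the creation functions would, without creating a
// resource. A hedged sketch (not part of the library):
//
//      const sg_image_desc def = sg_query_image_defaults(&(sg_image_desc){
//          .width = 256, .height = 256,
//      });
//      // def.type, def.pixel_format, def.num_mipmaps etc now hold the
//      // defaults that sg_make_image() would apply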

SOKOL_API_IMPL const void* sg_d3d11_device(void) {
    #if defined(SOKOL_D3D11)
    return (const void*) _sg.d3d11.dev;
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL const void* sg_d3d11_device_context(void) {
    #if defined(SOKOL_D3D11)
    return (const void*) _sg.d3d11.ctx;
    #else
    return 0;
    #endif
}
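
// A hedged usage sketch for the D3D11 getters above (not part of the
// library); the returned void pointers are unretained COM object pointers:
//
//      #if defined(SOKOL_D3D11)
//      ID3D11Device* dev = (ID3D11Device*) sg_d3d11_device();
//      ID3D11DeviceContext* ctx = (ID3D11DeviceContext*) sg_d3d11_device_context();
//      #endif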

SOKOL_API_IMPL sg_d3d11_buffer_info sg_d3d11_query_buffer_info(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_d3d11_buffer_info, res);
    #if defined(SOKOL_D3D11)
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        res.buf = (const void*) buf->d3d11.buf;
    }
    #else
    _SOKOL_UNUSED(buf_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_d3d11_image_info sg_d3d11_query_image_info(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_d3d11_image_info, res);
    #if defined(SOKOL_D3D11)
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        res.tex2d = (const void*) img->d3d11.tex2d;
        res.tex3d = (const void*) img->d3d11.tex3d;
        res.res = (const void*) img->d3d11.res;
    }
    #else
    _SOKOL_UNUSED(img_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_d3d11_sampler_info sg_d3d11_query_sampler_info(sg_sampler smp_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_d3d11_sampler_info, res);
    #if defined(SOKOL_D3D11)
    const _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
    if (smp) {
        res.smp = (const void*) smp->d3d11.smp;
    }
    #else
    _SOKOL_UNUSED(smp_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_d3d11_shader_info sg_d3d11_query_shader_info(sg_shader shd_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_d3d11_shader_info, res);
    #if defined(SOKOL_D3D11)
    const _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
    if (shd) {
        for (size_t i = 0; i < SG_MAX_UNIFORMBLOCK_BINDSLOTS; i++) {
            res.cbufs[i] = (const void*) shd->d3d11.all_cbufs[i];
        }
        res.vs = (const void*) shd->d3d11.vs;
        res.fs = (const void*) shd->d3d11.fs;
    }
    #else
    _SOKOL_UNUSED(shd_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_d3d11_pipeline_info sg_d3d11_query_pipeline_info(sg_pipeline pip_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_d3d11_pipeline_info, res);
    #if defined(SOKOL_D3D11)
    const _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
    if (pip) {
        res.il = (const void*) pip->d3d11.il;
        res.rs = (const void*) pip->d3d11.rs;
        res.dss = (const void*) pip->d3d11.dss;
        res.bs = (const void*) pip->d3d11.bs;
    }
    #else
    _SOKOL_UNUSED(pip_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_d3d11_view_info sg_d3d11_query_view_info(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_d3d11_view_info, res);
    #if defined(SOKOL_D3D11)
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    // guard against a failed lookup, like the other query-info functions
    if (view) {
        res.srv = (const void*) view->d3d11.srv;
        res.uav = (const void*) view->d3d11.uav;
        res.rtv = (const void*) view->d3d11.rtv;
        res.dsv = (const void*) view->d3d11.dsv;
    }
    #else
    _SOKOL_UNUSED(view_id);
    #endif
    return res;
}

SOKOL_API_IMPL const void* sg_mtl_device(void) {
    #if defined(SOKOL_METAL)
    if (nil != _sg.mtl.device) {
        return (__bridge const void*) _sg.mtl.device;
    } else {
        return 0;
    }
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL const void* sg_mtl_render_command_encoder(void) {
    #if defined(SOKOL_METAL)
    if (nil != _sg.mtl.render_cmd_encoder) {
        return (__bridge const void*) _sg.mtl.render_cmd_encoder;
    } else {
        return 0;
    }
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL const void* sg_mtl_compute_command_encoder(void) {
    #if defined(SOKOL_METAL)
    if (nil != _sg.mtl.compute_cmd_encoder) {
        return (__bridge const void*) _sg.mtl.compute_cmd_encoder;
    } else {
        return 0;
    }
    #else
    return 0;
    #endif
}
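
// A hedged usage sketch for the Metal getters above (not part of the
// library); in Objective-C with ARC, cast the returned pointer back with
// __bridge, which transfers no ownership:
//
//      #if defined(SOKOL_METAL)
//      id<MTLDevice> mtl_dev = (__bridge id<MTLDevice>) sg_mtl_device();
//      id<MTLRenderCommandEncoder> enc =
//          (__bridge id<MTLRenderCommandEncoder>) sg_mtl_render_command_encoder();
//      #endif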

SOKOL_API_IMPL sg_mtl_buffer_info sg_mtl_query_buffer_info(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_mtl_buffer_info, res);
    #if defined(SOKOL_METAL)
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
            if (buf->mtl.buf[i] != 0) {
                res.buf[i] = (__bridge void*) _sg_mtl_id(buf->mtl.buf[i]);
            }
        }
        res.active_slot = buf->cmn.active_slot;
    }
    #else
    _SOKOL_UNUSED(buf_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_mtl_image_info sg_mtl_query_image_info(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_mtl_image_info, res);
    #if defined(SOKOL_METAL)
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
            if (img->mtl.tex[i] != 0) {
                res.tex[i] = (__bridge void*) _sg_mtl_id(img->mtl.tex[i]);
            }
        }
        res.active_slot = img->cmn.active_slot;
    }
    #else
    _SOKOL_UNUSED(img_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_mtl_sampler_info sg_mtl_query_sampler_info(sg_sampler smp_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_mtl_sampler_info, res);
    #if defined(SOKOL_METAL)
    const _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
    if (smp) {
        if (smp->mtl.sampler_state != 0) {
            res.smp = (__bridge void*) _sg_mtl_id(smp->mtl.sampler_state);
        }
    }
    #else
    _SOKOL_UNUSED(smp_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_mtl_shader_info sg_mtl_query_shader_info(sg_shader shd_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_mtl_shader_info, res);
    #if defined(SOKOL_METAL)
    const _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
    if (shd) {
        const int vertex_lib = shd->mtl.vertex_func.mtl_lib;
        const int vertex_func = shd->mtl.vertex_func.mtl_func;
        const int fragment_lib = shd->mtl.fragment_func.mtl_lib;
        const int fragment_func = shd->mtl.fragment_func.mtl_func;
        if (vertex_lib != 0) {
            res.vertex_lib = (__bridge void*) _sg_mtl_id(vertex_lib);
        }
        if (fragment_lib != 0) {
            res.fragment_lib = (__bridge void*) _sg_mtl_id(fragment_lib);
        }
        if (vertex_func != 0) {
            res.vertex_func = (__bridge void*) _sg_mtl_id(vertex_func);
        }
        if (fragment_func != 0) {
            res.fragment_func = (__bridge void*) _sg_mtl_id(fragment_func);
        }
    }
    #else
    _SOKOL_UNUSED(shd_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_mtl_pipeline_info sg_mtl_query_pipeline_info(sg_pipeline pip_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_mtl_pipeline_info, res);
    #if defined(SOKOL_METAL)
    const _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
    if (pip) {
        if (pip->mtl.rps != 0) {
            res.rps = (__bridge void*) _sg_mtl_id(pip->mtl.rps);
        }
        if (pip->mtl.dss != 0) {
            res.dss = (__bridge void*) _sg_mtl_id(pip->mtl.dss);
        }
    }
    #else
    _SOKOL_UNUSED(pip_id);
    #endif
    return res;
}

SOKOL_API_IMPL const void* sg_wgpu_device(void) {
    #if defined(SOKOL_WGPU)
    return (const void*) _sg.wgpu.dev;
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL const void* sg_wgpu_queue(void) {
    #if defined(SOKOL_WGPU)
    return (const void*) _sg.wgpu.queue;
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL const void* sg_wgpu_command_encoder(void) {
    #if defined(SOKOL_WGPU)
    return (const void*) _sg.wgpu.cmd_enc;
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL const void* sg_wgpu_render_pass_encoder(void) {
    #if defined(SOKOL_WGPU)
    return (const void*) _sg.wgpu.rpass_enc;
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL const void* sg_wgpu_compute_pass_encoder(void) {
    #if defined(SOKOL_WGPU)
    return (const void*) _sg.wgpu.cpass_enc;
    #else
    return 0;
    #endif
}

SOKOL_API_IMPL sg_wgpu_buffer_info sg_wgpu_query_buffer_info(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_wgpu_buffer_info, res);
    #if defined(SOKOL_WGPU)
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        res.buf = (const void*) buf->wgpu.buf;
    }
    #else
    _SOKOL_UNUSED(buf_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_wgpu_image_info sg_wgpu_query_image_info(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_wgpu_image_info, res);
    #if defined(SOKOL_WGPU)
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        res.tex = (const void*) img->wgpu.tex;
    }
    #else
    _SOKOL_UNUSED(img_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_wgpu_sampler_info sg_wgpu_query_sampler_info(sg_sampler smp_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_wgpu_sampler_info, res);
    #if defined(SOKOL_WGPU)
    const _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
    if (smp) {
        res.smp = (const void*) smp->wgpu.smp;
    }
    #else
    _SOKOL_UNUSED(smp_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_wgpu_shader_info sg_wgpu_query_shader_info(sg_shader shd_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_wgpu_shader_info, res);
    #if defined(SOKOL_WGPU)
    const _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
    if (shd) {
        res.vs_mod = (const void*) shd->wgpu.vertex_func.module;
        res.fs_mod = (const void*) shd->wgpu.fragment_func.module;
        res.bgl = (const void*) shd->wgpu.bgl_view_smp;
    }
    #else
    _SOKOL_UNUSED(shd_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_wgpu_pipeline_info sg_wgpu_query_pipeline_info(sg_pipeline pip_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_wgpu_pipeline_info, res);
    #if defined(SOKOL_WGPU)
    const _sg_pipeline_t* pip = _sg_lookup_pipeline(pip_id.id);
    if (pip) {
        res.render_pipeline = (const void*) pip->wgpu.rpip;
        res.compute_pipeline = (const void*) pip->wgpu.cpip;
    }
    #else
    _SOKOL_UNUSED(pip_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_wgpu_view_info sg_wgpu_query_view_info(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_wgpu_view_info, res);
    #if defined(SOKOL_WGPU)
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        res.view = (const void*) view->wgpu.view;
    }
    #else
    _SOKOL_UNUSED(view_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_gl_buffer_info sg_gl_query_buffer_info(sg_buffer buf_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_gl_buffer_info, res);
    #if defined(_SOKOL_ANY_GL)
    const _sg_buffer_t* buf = _sg_lookup_buffer(buf_id.id);
    if (buf) {
        for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
            res.buf[i] = buf->gl.buf[i];
        }
        res.active_slot = buf->cmn.active_slot;
    }
    #else
    _SOKOL_UNUSED(buf_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_gl_image_info sg_gl_query_image_info(sg_image img_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_gl_image_info, res);
    #if defined(_SOKOL_ANY_GL)
    const _sg_image_t* img = _sg_lookup_image(img_id.id);
    if (img) {
        for (int i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
            res.tex[i] = img->gl.tex[i];
        }
        res.tex_target = img->gl.target;
        res.active_slot = img->cmn.active_slot;
    }
    #else
    _SOKOL_UNUSED(img_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_gl_sampler_info sg_gl_query_sampler_info(sg_sampler smp_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_gl_sampler_info, res);
    #if defined(_SOKOL_ANY_GL)
    const _sg_sampler_t* smp = _sg_lookup_sampler(smp_id.id);
    if (smp) {
        res.smp = smp->gl.smp;
    }
    #else
    _SOKOL_UNUSED(smp_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_gl_shader_info sg_gl_query_shader_info(sg_shader shd_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_gl_shader_info, res);
    #if defined(_SOKOL_ANY_GL)
    const _sg_shader_t* shd = _sg_lookup_shader(shd_id.id);
    if (shd) {
        res.prog = shd->gl.prog;
    }
    #else
    _SOKOL_UNUSED(shd_id);
    #endif
    return res;
}

SOKOL_API_IMPL sg_gl_view_info sg_gl_query_view_info(sg_view view_id) {
    SOKOL_ASSERT(_sg.valid);
    _SG_STRUCT(sg_gl_view_info, res);
    #if defined(_SOKOL_ANY_GL)
    const _sg_view_t* view = _sg_lookup_view(view_id.id);
    if (view) {
        for (size_t i = 0; i < SG_NUM_INFLIGHT_FRAMES; i++) {
            res.tex_view[i] = view->gl.tex_view[i];
        }
        res.msaa_render_buffer = view->gl.msaa_render_buffer;
        res.msaa_resolve_frame_buffer = view->gl.msaa_resolve_frame_buffer;
    }
    #else
    _SOKOL_UNUSED(view_id);
    #endif
    return res;
}

#ifdef _MSC_VER
#pragma warning(pop)
#endif

#endif // SOKOL_GFX_IMPL