/* eslint-disable @typescript-eslint/naming-convention */
/* eslint-disable babylonjs/available */
/* eslint-disable jsdoc/require-jsdoc */
// License for the mipmap generation code:
//
// Copyright 2020 Brandon Jones
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
import * as WebGPUConstants from "./webgpuConstants.js";
import { InternalTextureSource } from "../../Materials/Textures/internalTexture.js";
import { WebGPUHardwareTexture } from "./webgpuHardwareTexture.js";
import { WebGPUTextureHelper } from "./webgpuTextureHelper.js";
// TODO WEBGPU improve mipmap generation by using compute shaders
// TODO WEBGPU use WGSL instead of GLSL
const mipmapVertexSource = `
    const vec2 pos[4] = vec2[4](vec2(-1.0f, 1.0f), vec2(1.0f, 1.0f), vec2(-1.0f, -1.0f), vec2(1.0f, -1.0f));
    const vec2 tex[4] = vec2[4](vec2(0.0f, 0.0f), vec2(1.0f, 0.0f), vec2(0.0f, 1.0f), vec2(1.0f, 1.0f));
    layout(location = 0) out vec2 vTex;
    void main() {
        vTex = tex[gl_VertexIndex];
        gl_Position = vec4(pos[gl_VertexIndex], 0.0, 1.0);
    }
    `;
const mipmapFragmentSource = `
    layout(set = 0, binding = 0) uniform sampler imgSampler;
    layout(set = 0, binding = 1) uniform texture2D img;
    layout(location = 0) in vec2 vTex;
    layout(location = 0) out vec4 outColor;
    void main() {
        outColor = texture(sampler2D(img, imgSampler), vTex);
    }
    `;
const invertYPreMultiplyAlphaVertexSource = `
    #extension GL_EXT_samplerless_texture_functions : enable
    const vec2 pos[4] = vec2[4](vec2(-1.0f, 1.0f), vec2(1.0f, 1.0f), vec2(-1.0f, -1.0f), vec2(1.0f, -1.0f));
    const vec2 tex[4] = vec2[4](vec2(0.0f, 0.0f), vec2(1.0f, 0.0f), vec2(0.0f, 1.0f), vec2(1.0f, 1.0f));
    layout(set = 0, binding = 0) uniform texture2D img;
    #ifdef INVERTY
        layout(location = 0) out flat ivec2 vTextureSize;
    #endif
    void main() {
        #ifdef INVERTY
            vTextureSize = textureSize(img, 0);
        #endif
        gl_Position = vec4(pos[gl_VertexIndex], 0.0, 1.0);
    }
    `;
const invertYPreMultiplyAlphaFragmentSource = `
    #extension GL_EXT_samplerless_texture_functions : enable
    layout(set = 0, binding = 0) uniform texture2D img;
    #ifdef INVERTY
        layout(location = 0) in flat ivec2 vTextureSize;
    #endif
    layout(location = 0) out vec4 outColor;
    void main() {
        #ifdef INVERTY
            vec4 color = texelFetch(img, ivec2(gl_FragCoord.x, vTextureSize.y - gl_FragCoord.y), 0);
        #else
            vec4 color = texelFetch(img, ivec2(gl_FragCoord.xy), 0);
        #endif
        #ifdef PREMULTIPLYALPHA
            color.rgb *= color.a;
        #endif
        outColor = color;
    }
    `;
const invertYPreMultiplyAlphaWithOfstVertexSource = invertYPreMultiplyAlphaVertexSource;
const invertYPreMultiplyAlphaWithOfstFragmentSource = `
    #extension GL_EXT_samplerless_texture_functions : enable
    layout(set = 0, binding = 0) uniform texture2D img;
    layout(set = 0, binding = 1) uniform Params {
        float ofstX;
        float ofstY;
        float width;
        float height;
    };
    #ifdef INVERTY
        layout(location = 0) in flat ivec2 vTextureSize;
    #endif
    layout(location = 0) out vec4 outColor;
    void main() {
        if (gl_FragCoord.x < ofstX || gl_FragCoord.x >= ofstX + width) {
            discard;
        }
        if (gl_FragCoord.y < ofstY || gl_FragCoord.y >= ofstY + height) {
            discard;
        }
        #ifdef INVERTY
            vec4 color = texelFetch(img, ivec2(gl_FragCoord.x, ofstY + height - (gl_FragCoord.y - ofstY)), 0);
        #else
            vec4 color = texelFetch(img, ivec2(gl_FragCoord.xy), 0);
        #endif
        #ifdef PREMULTIPLYALPHA
            color.rgb *= color.a;
        #endif
        outColor = color;
    }
    `;
const clearVertexSource = `
    const vec2 pos[4] = vec2[4](vec2(-1.0f, 1.0f), vec2(1.0f, 1.0f), vec2(-1.0f, -1.0f), vec2(1.0f, -1.0f));
    void main() {
        gl_Position = vec4(pos[gl_VertexIndex], 0.0, 1.0);
    }
    `;
const clearFragmentSource = `
    layout(set = 0, binding = 0) uniform Uniforms {
        uniform vec4 color;
    };
    layout(location = 0) out vec4 outColor;
    void main() {
        outColor = color;
    }
    `;
const copyVideoToTextureVertexSource = `
    struct VertexOutput {
        @builtin(position) Position : vec4<f32>,
        @location(0) fragUV : vec2<f32>
    }
    @vertex
    fn main(
        @builtin(vertex_index) VertexIndex : u32
    ) -> VertexOutput {
        var pos = array<vec2<f32>, 4>(
            vec2(-1.0, 1.0),
            vec2( 1.0, 1.0),
            vec2(-1.0, -1.0),
            vec2( 1.0, -1.0)
        );
        var tex = array<vec2<f32>, 4>(
            vec2(0.0, 0.0),
            vec2(1.0, 0.0),
            vec2(0.0, 1.0),
            vec2(1.0, 1.0)
        );
        var output: VertexOutput;
        output.Position = vec4<f32>(pos[VertexIndex], 0.0, 1.0);
        output.fragUV = tex[VertexIndex];
        return output;
    }
    `;
const copyVideoToTextureFragmentSource = `
    @group(0) @binding(0) var videoSampler: sampler;
    @group(0) @binding(1) var videoTexture: texture_external;
    @fragment
    fn main(
        @location(0) fragUV: vec2<f32>
    ) -> @location(0) vec4<f32> {
        return textureSampleBaseClampToEdge(videoTexture, videoSampler, fragUV);
    }
    `;
const copyVideoToTextureInvertYFragmentSource = `
    @group(0) @binding(0) var videoSampler: sampler;
    @group(0) @binding(1) var videoTexture: texture_external;
    @fragment
    fn main(
        @location(0) fragUV: vec2<f32>
    ) -> @location(0) vec4<f32> {
        return textureSampleBaseClampToEdge(videoTexture, videoSampler, vec2<f32>(fragUV.x, 1.0 - fragUV.y));
    }
    `;
var PipelineType;
(function (PipelineType) {
    PipelineType[PipelineType["MipMap"] = 0] = "MipMap";
    PipelineType[PipelineType["InvertYPremultiplyAlpha"] = 1] = "InvertYPremultiplyAlpha";
    PipelineType[PipelineType["Clear"] = 2] = "Clear";
    PipelineType[PipelineType["InvertYPremultiplyAlphaWithOfst"] = 3] = "InvertYPremultiplyAlphaWithOfst";
})(PipelineType || (PipelineType = {}));
var VideoPipelineType;
(function (VideoPipelineType) {
    VideoPipelineType[VideoPipelineType["DontInvertY"] = 0] = "DontInvertY";
    VideoPipelineType[VideoPipelineType["InvertY"] = 1] = "InvertY";
})(VideoPipelineType || (VideoPipelineType = {}));
const shadersForPipelineType = [
    { vertex: mipmapVertexSource, fragment: mipmapFragmentSource },
    { vertex: invertYPreMultiplyAlphaVertexSource, fragment: invertYPreMultiplyAlphaFragmentSource },
    { vertex: clearVertexSource, fragment: clearFragmentSource },
    { vertex: invertYPreMultiplyAlphaWithOfstVertexSource, fragment: invertYPreMultiplyAlphaWithOfstFragmentSource },
];
/**
 * Maps a (renderable) texture format (GPUTextureFormat) to an index for fast lookup (e.g. in caches).
 * The number of entries must not exceed 64; otherwise, the code in WebGPUCacheRenderPipeline.setMRT must be updated.
 */
export const renderableTextureFormatToIndex = {
    "": 0,
    r8unorm: 1,
    r8uint: 2,
    r8sint: 3,
    r16uint: 4,
    r16sint: 5,
    r16float: 6,
    rg8unorm: 7,
    rg8uint: 8,
    rg8sint: 9,
    r32uint: 10,
    r32sint: 11,
    r32float: 12,
    rg16uint: 13,
    rg16sint: 14,
    rg16float: 15,
    rgba8unorm: 16,
    "rgba8unorm-srgb": 17,
    rgba8uint: 18,
    rgba8sint: 19,
    bgra8unorm: 20,
    "bgra8unorm-srgb": 21,
    rgb10a2uint: 22,
    rgb10a2unorm: 23,
    /* rg11b10ufloat: this entry is dynamically added if the "RG11B10UFloatRenderable" extension is supported */
    rg32uint: 24,
    rg32sint: 25,
    rg32float: 26,
    rgba16uint: 27,
    rgba16sint: 28,
    rgba16float: 29,
    rgba32uint: 30,
    rgba32sint: 31,
    rgba32float: 32,
    stencil8: 33,
    depth16unorm: 34,
    depth24plus: 35,
    "depth24plus-stencil8": 36,
    depth32float: 37,
    "depth32float-stencil8": 38,
};
/** @internal */
export class WebGPUTextureManager {
    //------------------------------------------------------------------------------
    // Initialization / Helpers
    //------------------------------------------------------------------------------
    constructor(engine, device, glslang, tintWASM, bufferManager, enabledExtensions) {
        this._pipelines = {};
        this._compiledShaders = [];
        this._videoPipelines = {};
        this._videoCompiledShaders = [];
        this._deferredReleaseTextures = [];
        this._engine = engine;
        this._device = device;
        this._glslang = glslang;
        this._tintWASM = tintWASM;
        this._bufferManager = bufferManager;
        if (enabledExtensions.indexOf(WebGPUConstants.FeatureName.RG11B10UFloatRenderable) !== -1) {
            const keys = Object.keys(renderableTextureFormatToIndex);
            renderableTextureFormatToIndex[WebGPUConstants.TextureFormat.RG11B10UFloat] = renderableTextureFormatToIndex[keys[keys.length - 1]] + 1;
        }
        this._mipmapSampler = device.createSampler({ minFilter: WebGPUConstants.FilterMode.Linear });
        this._videoSampler = device.createSampler({ minFilter: WebGPUConstants.FilterMode.Linear });
        this._ubCopyWithOfst = this._bufferManager.createBuffer(4 * 4, WebGPUConstants.BufferUsage.Uniform | WebGPUConstants.BufferUsage.CopyDst, "UBCopyWithOffset").underlyingResource;
        this._getPipeline(WebGPUConstants.TextureFormat.RGBA8Unorm);
        this._getVideoPipeline(WebGPUConstants.TextureFormat.RGBA8Unorm);
    }
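    /**
     * Returns (and lazily creates) the render pipeline + bind group layout used by the internal
     * "blit" passes (mipmap generation, invertY / premultiply alpha, clear).
     * Pipelines are cached per target format, under a bit-packed index derived from the pipeline
     * type and the invertY / premultiplyAlpha flags. Shaders are compiled from GLSL to SPIR-V with
     * glslang and, when the Tint WASM module is available, converted to WGSL.
     */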
    _getPipeline(format, type = PipelineType.MipMap, params) {
        const index = type === PipelineType.MipMap
            ? 1 << 0
            : type === PipelineType.InvertYPremultiplyAlpha
                ? ((params.invertY ? 1 : 0) << 1) + ((params.premultiplyAlpha ? 1 : 0) << 2)
                : type === PipelineType.Clear
                    ? 1 << 3
                    : type === PipelineType.InvertYPremultiplyAlphaWithOfst
                        ? ((params.invertY ? 1 : 0) << 4) + ((params.premultiplyAlpha ? 1 : 0) << 5)
                        : 0;
        if (!this._pipelines[format]) {
            this._pipelines[format] = [];
        }
        let pipelineAndBGL = this._pipelines[format][index];
        if (!pipelineAndBGL) {
            let defines = "#version 450\n";
            if (type === PipelineType.InvertYPremultiplyAlpha || type === PipelineType.InvertYPremultiplyAlphaWithOfst) {
                if (params.invertY) {
                    defines += "#define INVERTY\n";
                }
                if (params.premultiplyAlpha) {
                    defines += "#define PREMULTIPLYALPHA\n";
                }
            }
            let modules = this._compiledShaders[index];
            if (!modules) {
                let vertexCode = this._glslang.compileGLSL(defines + shadersForPipelineType[type].vertex, "vertex");
                let fragmentCode = this._glslang.compileGLSL(defines + shadersForPipelineType[type].fragment, "fragment");
                if (this._tintWASM) {
                    vertexCode = this._tintWASM.convertSpirV2WGSL(vertexCode);
                    fragmentCode = this._tintWASM.convertSpirV2WGSL(fragmentCode);
                }
                const vertexModule = this._device.createShaderModule({
                    code: vertexCode,
                });
                const fragmentModule = this._device.createShaderModule({
                    code: fragmentCode,
                });
                modules = this._compiledShaders[index] = [vertexModule, fragmentModule];
            }
            const pipeline = this._device.createRenderPipeline({
                layout: WebGPUConstants.AutoLayoutMode.Auto,
                vertex: {
                    module: modules[0],
                    entryPoint: "main",
                },
                fragment: {
                    module: modules[1],
                    entryPoint: "main",
                    targets: [
                        {
                            format,
                        },
                    ],
                },
                primitive: {
                    topology: WebGPUConstants.PrimitiveTopology.TriangleStrip,
                    stripIndexFormat: WebGPUConstants.IndexFormat.Uint16,
                },
            });
            pipelineAndBGL = this._pipelines[format][index] = [pipeline, pipeline.getBindGroupLayout(0)];
        }
        return pipelineAndBGL;
    }
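    /**
     * Same idea as _getPipeline, but for the video copy passes: caches one pipeline per target
     * format and per VideoPipelineType (with or without Y inversion), using the prebuilt WGSL
     * shaders that sample the video through an external texture binding.
     */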
    _getVideoPipeline(format, type = VideoPipelineType.DontInvertY) {
        const index = type === VideoPipelineType.InvertY ? 1 << 0 : 0;
        if (!this._videoPipelines[format]) {
            this._videoPipelines[format] = [];
        }
        let pipelineAndBGL = this._videoPipelines[format][index];
        if (!pipelineAndBGL) {
            let modules = this._videoCompiledShaders[index];
            if (!modules) {
                const vertexModule = this._device.createShaderModule({
                    code: copyVideoToTextureVertexSource,
                });
                const fragmentModule = this._device.createShaderModule({
                    code: index === 0 ? copyVideoToTextureFragmentSource : copyVideoToTextureInvertYFragmentSource,
                });
                modules = this._videoCompiledShaders[index] = [vertexModule, fragmentModule];
            }
            const pipeline = this._device.createRenderPipeline({
                label: `BabylonWebGPUDevice${this._engine.uniqueId}_CopyVideoToTexture_${format}_${index === 0 ? "DontInvertY" : "InvertY"}`,
                layout: WebGPUConstants.AutoLayoutMode.Auto,
                vertex: {
                    module: modules[0],
                    entryPoint: "main",
                },
                fragment: {
                    module: modules[1],
                    entryPoint: "main",
                    targets: [
                        {
                            format,
                        },
                    ],
                },
                primitive: {
                    topology: WebGPUConstants.PrimitiveTopology.TriangleStrip,
                    stripIndexFormat: WebGPUConstants.IndexFormat.Uint16,
                },
            });
            pipelineAndBGL = this._videoPipelines[format][index] = [pipeline, pipeline.getBindGroupLayout(0)];
        }
        return pipelineAndBGL;
    }
    setCommandEncoder(encoder) {
        this._commandEncoderForCreation = encoder;
    }
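    /**
     * Copies the current video frame into the destination internal texture: the video's
     * underlyingResource is imported as a GPUExternalTexture and drawn into the texture view
     * with a 4-vertex triangle strip (optionally flipping Y). If no command encoder is provided,
     * a dedicated one is created and submitted immediately.
     */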
    copyVideoToTexture(video, texture, format, invertY = false, commandEncoder) {
        const useOwnCommandEncoder = commandEncoder === undefined;
        const [pipeline, bindGroupLayout] = this._getVideoPipeline(format, invertY ? VideoPipelineType.InvertY : VideoPipelineType.DontInvertY);
        if (useOwnCommandEncoder) {
            commandEncoder = this._device.createCommandEncoder({});
        }
        commandEncoder.pushDebugGroup?.(`copy video to texture - invertY=${invertY}`);
        const webgpuHardwareTexture = texture._hardwareTexture;
        const renderPassDescriptor = {
            label: `BabylonWebGPUDevice${this._engine.uniqueId}_copyVideoToTexture_${format}_${invertY ? "InvertY" : "DontInvertY"}${texture.label ? "_" + texture.label : ""}`,
            colorAttachments: [
                {
                    view: webgpuHardwareTexture.underlyingResource.createView({
                        format,
                        dimension: WebGPUConstants.TextureViewDimension.E2d,
                        mipLevelCount: 1,
                        baseArrayLayer: 0,
                        baseMipLevel: 0,
                        arrayLayerCount: 1,
                        aspect: WebGPUConstants.TextureAspect.All,
                    }),
                    loadOp: WebGPUConstants.LoadOp.Load,
                    storeOp: WebGPUConstants.StoreOp.Store,
                },
            ],
        };
        const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
        const descriptor = {
            layout: bindGroupLayout,
            entries: [
                {
                    binding: 0,
                    resource: this._videoSampler,
                },
                {
                    binding: 1,
                    resource: this._device.importExternalTexture({
                        source: video.underlyingResource,
                    }),
                },
            ],
        };
        const bindGroup = this._device.createBindGroup(descriptor);
        passEncoder.setPipeline(pipeline);
        passEncoder.setBindGroup(0, bindGroup);
        passEncoder.draw(4, 1, 0, 0);
        passEncoder.end();
        commandEncoder.popDebugGroup?.();
        if (useOwnCommandEncoder) {
            this._device.queue.submit([commandEncoder.finish()]);
            commandEncoder = null;
        }
    }
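    /**
     * Flips a texture vertically and/or premultiplies its alpha on the GPU.
     * The source mip level / face is rendered into a temporary texture (with the INVERTY /
     * PREMULTIPLYALPHA shader defines applied, optionally restricted to a rectangle) and the
     * result is copied back over the original. When a WebGPUHardwareTexture is passed and the
     * most common case applies (invertY only, single layer, face 0), the temporary texture,
     * render pass descriptor and bind group are cached on the hardware texture for reuse.
     */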
    invertYPreMultiplyAlpha(gpuOrHdwTexture, width, height, format, invertY = false, premultiplyAlpha = false, faceIndex = 0, mipLevel = 0, layers = 1, ofstX = 0, ofstY = 0, rectWidth = 0, rectHeight = 0, commandEncoder,
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    allowGPUOptimization) {
        const useRect = rectWidth !== 0;
        const useOwnCommandEncoder = commandEncoder === undefined;
        const [pipeline, bindGroupLayout] = this._getPipeline(format, useRect ? PipelineType.InvertYPremultiplyAlphaWithOfst : PipelineType.InvertYPremultiplyAlpha, {
            invertY,
            premultiplyAlpha,
        });
        faceIndex = Math.max(faceIndex, 0);
        if (useOwnCommandEncoder) {
            commandEncoder = this._device.createCommandEncoder({});
        }
        commandEncoder.pushDebugGroup?.(`internal process texture - invertY=${invertY} premultiplyAlpha=${premultiplyAlpha}`);
        let gpuTexture;
        if (WebGPUTextureHelper.IsHardwareTexture(gpuOrHdwTexture)) {
            gpuTexture = gpuOrHdwTexture.underlyingResource;
            if (!(invertY && !premultiplyAlpha && layers === 1 && faceIndex === 0)) {
                // we optimize only for the most likely case (invertY=true, premultiplyAlpha=false, layers=1, faceIndex=0) to avoid dealing with big caches
                gpuOrHdwTexture = undefined;
            }
        }
        else {
            gpuTexture = gpuOrHdwTexture;
            gpuOrHdwTexture = undefined;
        }
        if (!gpuTexture) {
            return;
        }
        if (useRect) {
            this._bufferManager.setRawData(this._ubCopyWithOfst, 0, new Float32Array([ofstX, ofstY, rectWidth, rectHeight]), 0, 4 * 4);
        }
        const webgpuHardwareTexture = gpuOrHdwTexture;
        const outputTexture = webgpuHardwareTexture?._copyInvertYTempTexture ??
            this.createTexture({ width, height, layers: 1 }, false, false, false, false, false, format, 1, commandEncoder, WebGPUConstants.TextureUsage.CopySrc | WebGPUConstants.TextureUsage.RenderAttachment | WebGPUConstants.TextureUsage.TextureBinding, undefined, "TempTextureForCopyWithInvertY");
        const renderPassDescriptor = webgpuHardwareTexture?._copyInvertYRenderPassDescr ?? {
            label: `BabylonWebGPUDevice${this._engine.uniqueId}_invertYPreMultiplyAlpha_${format}_${invertY ? "InvertY" : "DontInvertY"}_${premultiplyAlpha ? "PremultiplyAlpha" : "DontPremultiplyAlpha"}`,
            colorAttachments: [
                {
                    view: outputTexture.createView({
                        format,
                        dimension: WebGPUConstants.TextureViewDimension.E2d,
                        baseMipLevel: 0,
                        mipLevelCount: 1,
                        arrayLayerCount: 1,
                        baseArrayLayer: 0,
                    }),
                    loadOp: WebGPUConstants.LoadOp.Load,
                    storeOp: WebGPUConstants.StoreOp.Store,
                },
            ],
        };
        const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
        let bindGroup = useRect ? webgpuHardwareTexture?._copyInvertYBindGroupWithOfst : webgpuHardwareTexture?._copyInvertYBindGroup;
        if (!bindGroup) {
            const descriptor = {
                layout: bindGroupLayout,
                entries: [
                    {
                        binding: 0,
                        resource: gpuTexture.createView({
                            format,
                            dimension: WebGPUConstants.TextureViewDimension.E2d,
                            baseMipLevel: mipLevel,
                            mipLevelCount: 1,
                            arrayLayerCount: layers,
                            baseArrayLayer: faceIndex,
                        }),
                    },
                ],
            };
            if (useRect) {
                descriptor.entries.push({
                    binding: 1,
                    resource: {
                        buffer: this._ubCopyWithOfst,
                    },
                });
            }
            bindGroup = this._device.createBindGroup(descriptor);
        }
        passEncoder.setPipeline(pipeline);
        passEncoder.setBindGroup(0, bindGroup);
        passEncoder.draw(4, 1, 0, 0);
        passEncoder.end();
        commandEncoder.copyTextureToTexture({
            texture: outputTexture,
        }, {
            texture: gpuTexture,
            mipLevel,
            origin: {
                x: 0,
                y: 0,
                z: faceIndex,
            },
        }, {
            width,
            height,
            depthOrArrayLayers: 1,
        });
        if (webgpuHardwareTexture) {
            webgpuHardwareTexture._copyInvertYTempTexture = outputTexture;
            webgpuHardwareTexture._copyInvertYRenderPassDescr = renderPassDescriptor;
            if (useRect) {
                webgpuHardwareTexture._copyInvertYBindGroupWithOfst = bindGroup;
            }
            else {
                webgpuHardwareTexture._copyInvertYBindGroup = bindGroup;
            }
        }
        else {
            this._deferredReleaseTextures.push([outputTexture, null]);
        }
        commandEncoder.popDebugGroup?.();
        if (useOwnCommandEncoder) {
            this._device.queue.submit([commandEncoder.finish()]);
            commandEncoder = null;
        }
    }
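    /**
     * Renders the given source texture view into the provided render pass with the Y axis flipped
     * (the InvertYPremultiplyAlpha pipeline with invertY=true, premultiplyAlpha=false).
     * The caller supplies the render pass descriptor, so this is a plain "blit with flip".
     */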
    copyWithInvertY(srcTextureView, format, renderPassDescriptor, commandEncoder) {
        const useOwnCommandEncoder = commandEncoder === undefined;
        const [pipeline, bindGroupLayout] = this._getPipeline(format, PipelineType.InvertYPremultiplyAlpha, { invertY: true, premultiplyAlpha: false });
        if (useOwnCommandEncoder) {
            commandEncoder = this._device.createCommandEncoder({});
        }
        commandEncoder.pushDebugGroup?.(`internal copy texture with invertY`);
        const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
        const bindGroup = this._device.createBindGroup({
            layout: bindGroupLayout,
            entries: [
                {
                    binding: 0,
                    resource: srcTextureView,
                },
            ],
        });
        passEncoder.setPipeline(pipeline);
        passEncoder.setBindGroup(0, bindGroup);
        passEncoder.draw(4, 1, 0, 0);
        passEncoder.end();
        commandEncoder.popDebugGroup?.();
        if (useOwnCommandEncoder) {
            this._device.queue.submit([commandEncoder.finish()]);
            commandEncoder = null;
        }
    }
    //------------------------------------------------------------------------------
    // Creation
    //------------------------------------------------------------------------------
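    /**
     * Creates a 2D (or 3D) GPUTexture. If imageBitmap is an actual ImageBitmap (rather than just a
     * size descriptor), its content is uploaded and mipmaps are optionally generated.
     * Usage flags are derived from the format (renderable or not, compressed or not) unless an
     * explicit usage is provided.
     */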
    createTexture(imageBitmap, hasMipmaps = false, generateMipmaps = false, invertY = false, premultiplyAlpha = false, is3D = false, format = WebGPUConstants.TextureFormat.RGBA8Unorm, sampleCount = 1, commandEncoder, usage = -1, additionalUsages = 0, label) {
        sampleCount = WebGPUTextureHelper.GetSample(sampleCount);
        const layerCount = imageBitmap.layers || 1;
        const textureSize = {
            width: imageBitmap.width,
            height: imageBitmap.height,
            depthOrArrayLayers: layerCount,
        };
        const renderAttachmentFlag = renderableTextureFormatToIndex[format] ? WebGPUConstants.TextureUsage.RenderAttachment : 0;
        const isCompressedFormat = WebGPUTextureHelper.IsCompressedFormat(format);
        const mipLevelCount = hasMipmaps ? WebGPUTextureHelper.ComputeNumMipmapLevels(imageBitmap.width, imageBitmap.height) : 1;
        const usages = usage >= 0 ? usage : WebGPUConstants.TextureUsage.CopySrc | WebGPUConstants.TextureUsage.CopyDst | WebGPUConstants.TextureUsage.TextureBinding;
        additionalUsages |= hasMipmaps && !isCompressedFormat ? WebGPUConstants.TextureUsage.CopySrc | renderAttachmentFlag : 0;
        if (!isCompressedFormat && !is3D) {
            // we don't know in advance if the texture will be updated with copyExternalImageToTexture (which requires these flags), so we always force the flags
            additionalUsages |= renderAttachmentFlag | WebGPUConstants.TextureUsage.CopyDst;
        }
        const gpuTexture = this._device.createTexture({
            label: `BabylonWebGPUDevice${this._engine.uniqueId}_Texture${is3D ? "3D" : "2D"}_${label ? label + "_" : ""}${textureSize.width}x${textureSize.height}x${textureSize.depthOrArrayLayers}_${hasMipmaps ? "wmips" : "womips"}_${format}_samples${sampleCount}`,
            size: textureSize,
            dimension: is3D ? WebGPUConstants.TextureDimension.E3d : WebGPUConstants.TextureDimension.E2d,
            format,
            usage: usages | additionalUsages,
            sampleCount,
            mipLevelCount,
        });
        if (WebGPUTextureHelper.IsImageBitmap(imageBitmap)) {
            this.updateTexture(imageBitmap, gpuTexture, imageBitmap.width, imageBitmap.height, layerCount, format, 0, 0, invertY, premultiplyAlpha, 0, 0);
            if (hasMipmaps && generateMipmaps) {
                this.generateMipmaps(gpuTexture, format, mipLevelCount, 0, is3D, commandEncoder);
            }
        }
        return gpuTexture;
    }
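    /**
     * Creates a cube texture (a 2D texture with 6 array layers). If an array of ImageBitmaps is
     * provided, one face per bitmap is uploaded and cube mipmaps are optionally generated.
     */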
    createCubeTexture(imageBitmaps, hasMipmaps = false, generateMipmaps = false, invertY = false, premultiplyAlpha = false, format = WebGPUConstants.TextureFormat.RGBA8Unorm, sampleCount = 1, commandEncoder, usage = -1, additionalUsages = 0, label) {
        sampleCount = WebGPUTextureHelper.GetSample(sampleCount);
        const width = WebGPUTextureHelper.IsImageBitmapArray(imageBitmaps) ? imageBitmaps[0].width : imageBitmaps.width;
        const height = WebGPUTextureHelper.IsImageBitmapArray(imageBitmaps) ? imageBitmaps[0].height : imageBitmaps.height;
        const renderAttachmentFlag = renderableTextureFormatToIndex[format] ? WebGPUConstants.TextureUsage.RenderAttachment : 0;
        const isCompressedFormat = WebGPUTextureHelper.IsCompressedFormat(format);
        const mipLevelCount = hasMipmaps ? WebGPUTextureHelper.ComputeNumMipmapLevels(width, height) : 1;
        const usages = usage >= 0 ? usage : WebGPUConstants.TextureUsage.CopySrc | WebGPUConstants.TextureUsage.CopyDst | WebGPUConstants.TextureUsage.TextureBinding;
        additionalUsages |= hasMipmaps && !isCompressedFormat ? WebGPUConstants.TextureUsage.CopySrc | renderAttachmentFlag : 0;
        if (!isCompressedFormat) {
            // we don't know in advance if the texture will be updated with copyExternalImageToTexture (which requires these flags), so we always force the flags
            additionalUsages |= renderAttachmentFlag | WebGPUConstants.TextureUsage.CopyDst;
        }
        const gpuTexture = this._device.createTexture({
            label: `BabylonWebGPUDevice${this._engine.uniqueId}_TextureCube_${label ? label + "_" : ""}${width}x${height}x6_${hasMipmaps ? "wmips" : "womips"}_${format}_samples${sampleCount}`,
            size: {
                width,
                height,
                depthOrArrayLayers: 6,
            },
            dimension: WebGPUConstants.TextureDimension.E2d,
            format,
            usage: usages | additionalUsages,
            sampleCount,
            mipLevelCount,
        });
        if (WebGPUTextureHelper.IsImageBitmapArray(imageBitmaps)) {
            this.updateCubeTextures(imageBitmaps, gpuTexture, width, height, format, invertY, premultiplyAlpha, 0, 0);
            if (hasMipmaps && generateMipmaps) {
                this.generateCubeMipmaps(gpuTexture, format, mipLevelCount, commandEncoder);
            }
        }
        return gpuTexture;
    }
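    /**
     * Generates the full mipmap chain for each of the 6 faces of a cube texture by calling
     * generateMipmaps once per face.
     */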
    generateCubeMipmaps(gpuTexture, format, mipLevelCount, commandEncoder) {
        const useOwnCommandEncoder = commandEncoder === undefined;
        if (useOwnCommandEncoder) {
            commandEncoder = this._device.createCommandEncoder({});
        }
        commandEncoder.pushDebugGroup?.(`create cube mipmaps - ${mipLevelCount} levels`);
        for (let f = 0; f < 6; ++f) {
            this.generateMipmaps(gpuTexture, format, mipLevelCount, f, false, commandEncoder);
        }
        commandEncoder.popDebugGroup?.();
        if (useOwnCommandEncoder) {
            this._device.queue.submit([commandEncoder.finish()]);
            commandEncoder = null;
        }
    }
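    /**
     * Generates mipmaps for one face/layer of a texture: for each level i, level i-1 is sampled
     * with a linear filter and rendered into level i through the MipMap pipeline.
     * When a WebGPUHardwareTexture is passed, the per-level render pass descriptors and bind
     * groups are cached on it so that regenerating mipmaps does not re-create them.
     */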
    generateMipmaps(gpuOrHdwTexture, format, mipLevelCount, faceIndex = 0, is3D = false, commandEncoder) {
        const useOwnCommandEncoder = commandEncoder === undefined;
        const [pipeline, bindGroupLayout] = this._getPipeline(format);
        faceIndex = Math.max(faceIndex, 0);
        if (useOwnCommandEncoder) {
            commandEncoder = this._device.createCommandEncoder({});
        }
        commandEncoder.pushDebugGroup?.(`create mipmaps for face #${faceIndex} - ${mipLevelCount} levels`);
        let gpuTexture;
        if (WebGPUTextureHelper.IsHardwareTexture(gpuOrHdwTexture)) {
            gpuTexture = gpuOrHdwTexture.underlyingResource;
            gpuOrHdwTexture._mipmapGenRenderPassDescr = gpuOrHdwTexture._mipmapGenRenderPassDescr || [];
            gpuOrHdwTexture._mipmapGenBindGroup = gpuOrHdwTexture._mipmapGenBindGroup || [];
        }
        else {
            gpuTexture = gpuOrHdwTexture;
            gpuOrHdwTexture = undefined;
        }
        if (!gpuTexture) {
            return;
        }
        const webgpuHardwareTexture = gpuOrHdwTexture;
        for (let i = 1; i < mipLevelCount; ++i) {
            const renderPassDescriptor = webgpuHardwareTexture?._mipmapGenRenderPassDescr[faceIndex]?.[i - 1] ?? {
                label: `BabylonWebGPUDevice${this._engine.uniqueId}_generateMipmaps_${format}_faceIndex${faceIndex}_level${i}`,
                colorAttachments: [
                    {
                        view: gpuTexture.createView({
                            format,
                            dimension: is3D ? WebGPUConstants.TextureViewDimension.E3d : WebGPUConstants.TextureViewDimension.E2d,
                            baseMipLevel: i,
                            mipLevelCount: 1,
                            arrayLayerCount: 1,
                            baseArrayLayer: faceIndex,
                        }),
                        loadOp: WebGPUConstants.LoadOp.Load,
                        storeOp: WebGPUConstants.StoreOp.Store,
                    },
                ],
            };
            if (webgpuHardwareTexture) {
                webgpuHardwareTexture._mipmapGenRenderPassDescr[faceIndex] = webgpuHardwareTexture._mipmapGenRenderPassDescr[faceIndex] || [];
                webgpuHardwareTexture._mipmapGenRenderPassDescr[faceIndex][i - 1] = renderPassDescriptor;
            }
            const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
            const bindGroup = webgpuHardwareTexture?._mipmapGenBindGroup[faceIndex]?.[i - 1] ??
                this._device.createBindGroup({
                    layout: bindGroupLayout,
                    entries: [
                        {
                            binding: 0,
                            resource: this._mipmapSampler,
                        },
                        {
                            binding: 1,
                            resource: gpuTexture.createView({
                                format,
                                dimension: is3D ? WebGPUConstants.TextureViewDimension.E3d : WebGPUConstants.TextureViewDimension.E2d,
                                baseMipLevel: i - 1,
                                mipLevelCount: 1,
                                arrayLayerCount: 1,
                                baseArrayLayer: faceIndex,
                            }),
                        },
                    ],
                });
            if (webgpuHardwareTexture) {
                webgpuHardwareTexture._mipmapGenBindGroup[faceIndex] = webgpuHardwareTexture._mipmapGenBindGroup[faceIndex] || [];
                webgpuHardwareTexture._mipmapGenBindGroup[faceIndex][i - 1] = bindGroup;
            }
            passEncoder.setPipeline(pipeline);
            passEncoder.setBindGroup(0, bindGroup);
            passEncoder.draw(4, 1, 0, 0);
            passEncoder.end();
        }
        commandEncoder.popDebugGroup?.();
        if (useOwnCommandEncoder) {
            this._device.queue.submit([commandEncoder.finish()]);
            commandEncoder = null;
        }
    }
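    /**
     * Creates the GPUTexture (and its default view) backing a Babylon InternalTexture: usage flags
     * are chosen from the texture source (render target, depth/stencil, ...), either a cube or a
     * 2D/3D texture is created, and finally the MSAA texture is created when samples > 1.
     */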
    createGPUTextureForInternalTexture(texture, width, height, depth, creationFlags) {
        if (!texture._hardwareTexture) {
            texture._hardwareTexture = new WebGPUHardwareTexture();
        }
        if (width === undefined) {
            width = texture.width;
        }
        if (height === undefined) {
            height = texture.height;
        }
        if (depth === undefined) {
            depth = texture.depth;
        }
        const gpuTextureWrapper = texture._hardwareTexture;
        const isStorageTexture = ((creationFlags ?? 0) & 1) !== 0;
        gpuTextureWrapper.format = WebGPUTextureHelper.GetWebGPUTextureFormat(texture.type, texture.format, texture._useSRGBBuffer);
        gpuTextureWrapper.textureUsages =
            texture._source === InternalTextureSource.RenderTarget || texture.source === InternalTextureSource.MultiRenderTarget
                ? WebGPUConstants.TextureUsage.TextureBinding | WebGPUConstants.TextureUsage.CopySrc | WebGPUConstants.TextureUsage.RenderAttachment
                : texture._source === InternalTextureSource.DepthStencil
                    ? WebGPUConstants.TextureUsage.TextureBinding | WebGPUConstants.TextureUsage.RenderAttachment
                    : -1;
        gpuTextureWrapper.textureAdditionalUsages = isStorageTexture ? WebGPUConstants.TextureUsage.StorageBinding : 0;
        const hasMipMaps = texture.generateMipMaps;
        const layerCount = depth || 1;
        let mipmapCount;
        if (texture._maxLodLevel !== null) {
            mipmapCount = texture._maxLodLevel;
        }
        else {
            mipmapCount = hasMipMaps ? WebGPUTextureHelper.ComputeNumMipmapLevels(width, height) : 1;
        }
        if (texture.isCube) {
            const gpuTexture = this.createCubeTexture({ width, height }, texture.generateMipMaps, texture.generateMipMaps, texture.invertY, false, gpuTextureWrapper.format, 1, this._commandEncoderForCreation, gpuTextureWrapper.textureUsages, gpuTextureWrapper.textureAdditionalUsages, texture.label);
            gpuTextureWrapper.set(gpuTexture);
            const arrayLayerCount = texture.is3D ? 1 : layerCount;
            const format = WebGPUTextureHelper.GetDepthFormatOnly(gpuTextureWrapper.format);
            const aspect = WebGPUTextureHelper.HasDepthAndStencilAspects(gpuTextureWrapper.format) ? WebGPUConstants.TextureAspect.DepthOnly : WebGPUConstants.TextureAspect.All;
            const dimension = texture.is2DArray ? WebGPUConstants.TextureViewDimension.CubeArray : WebGPUConstants.TextureViewDimension.Cube;
            gpuTextureWrapper.createView({
                label: `BabylonWebGPUDevice${this._engine.uniqueId}_TextureViewCube${texture.is2DArray ? "_Array" + arrayLayerCount : ""}_${width}x${height}_${hasMipMaps ? "wmips" : "womips"}_${format}_${dimension}_${aspect}_${texture.label ?? "noname"}`,
                format,
                dimension,
                mipLevelCount: mipmapCount,
                baseArrayLayer: 0,
                baseMipLevel: 0,
                arrayLayerCount: 6,
                aspect,
            }, isStorageTexture);
        }
        else {
            const gpuTexture = this.createTexture({ width, height, layers: layerCount }, texture.generateMipMaps, texture.generateMipMaps, texture.invertY, false, texture.is3D, gpuTextureWrapper.format, 1, this._commandEncoderForCreation, gpuTextureWrapper.textureUsages, gpuTextureWrapper.textureAdditionalUsages, texture.label);
            gpuTextureWrapper.set(gpuTexture);
            const arrayLayerCount = texture.is3D ? 1 : layerCount;
            const format = WebGPUTextureHelper.GetDepthFormatOnly(gpuTextureWrapper.format);
            const aspect = WebGPUTextureHelper.HasDepthAndStencilAspects(gpuTextureWrapper.format) ? WebGPUConstants.TextureAspect.DepthOnly : WebGPUConstants.TextureAspect.All;
            const dimension = texture.is2DArray
                ? WebGPUConstants.TextureViewDimension.E2dArray
                : texture.is3D
                    ? WebGPUConstants.TextureDimension.E3d
                    : WebGPUConstants.TextureViewDimension.E2d;
            gpuTextureWrapper.createView({
                label: `BabylonWebGPUDevice${this._engine.uniqueId}_TextureView${texture.is3D ? "3D" : "2D"}${texture.is2DArray ? "_Array" + arrayLayerCount : ""}_${width}x${height}${texture.is3D ? "x" + layerCount : ""}_${hasMipMaps ? "wmips" : "womips"}_${format}_${dimension}_${aspect}_${texture.label ?? "noname"}`,
                format,
                dimension,
                mipLevelCount: mipmapCount,
                baseArrayLayer: 0,
                baseMipLevel: 0,
                arrayLayerCount,
                aspect,
            }, isStorageTexture);
        }
        texture.width = texture.baseWidth = width;
        texture.height = texture.baseHeight = height;
        texture.depth = texture.baseDepth = depth;
        this.createMSAATexture(texture, texture.samples);
        return gpuTextureWrapper;
    }
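    /**
     * Creates (or recreates) the multisampled render-attachment texture associated with an
     * internal texture; does nothing when samples <= 1 or the texture has no hardware texture.
     */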
    createMSAATexture(texture, samples, releaseExisting = true, index = -1) {
        const gpuTextureWrapper = texture._hardwareTexture;
        if (releaseExisting) {
            gpuTextureWrapper?.releaseMSAATexture();
        }
        if (!gpuTextureWrapper || (samples ?? 1) <= 1) {
            return;
        }
        const width = texture.width;
        const height = texture.height;
        const gpuMSAATexture = this.createTexture({ width, height, layers: 1 }, false, false, false, false, false, gpuTextureWrapper.format, samples, this._commandEncoderForCreation, WebGPUConstants.TextureUsage.RenderAttachment, 0, texture.label ? "MSAA" + texture.label : undefined);
        gpuTextureWrapper.setMSAATexture(gpuMSAATexture, index);
    }
    //------------------------------------------------------------------------------
    // Update
    //------------------------------------------------------------------------------
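    /**
     * Uploads the 6 face images of a cube texture: layer f receives imageBitmaps[faces[f]], so the
     * faces array remaps the source bitmap order (presumably +X, +Y, +Z, -X, -Y, -Z) onto the
     * +X, -X, +Y, -Y, +Z, -Z array layer order used by cube textures.
     */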
    updateCubeTextures(imageBitmaps, gpuTexture, width, height, format, invertY = false, premultiplyAlpha = false, offsetX = 0, offsetY = 0) {
        const faces = [0, 3, 1, 4, 2, 5];
        for (let f = 0; f < faces.length; ++f) {
            const imageBitmap = imageBitmaps[faces[f]];
            this.updateTexture(imageBitmap, gpuTexture, width, height, 1, format, f, 0, invertY, premultiplyAlpha, offsetX, offsetY);
        }
    }
    // TODO WEBGPU handle the data source not being in the same format as the destination texture?
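    /**
     * Uploads data into one face / mip level of a texture.
     * - Raw data (ArrayBufferView): copied through a temporary mapped buffer when the row pitch is
     *   already 256-byte aligned, otherwise via queue.writeTexture; invertY / premultiplyAlpha are
     *   then applied in place on the destination.
     * - ImageBitmap-like sources: copied with queue.copyExternalImageToTexture, going through a
     *   temporary texture when a Y flip is requested and the source does not exactly cover the
     *   destination.
     */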
    updateTexture(imageBitmap, texture, width, height, layers, format, faceIndex = 0, mipLevel = 0, invertY = false, premultiplyAlpha = false, offsetX = 0, offsetY = 0, allowGPUOptimization) {
        const gpuTexture = WebGPUTextureHelper.IsInternalTexture(texture) ? texture._hardwareTexture.underlyingResource : texture;
        const blockInformation = WebGPUTextureHelper.GetBlockInformationFromFormat(format);
        const gpuOrHdwTexture = WebGPUTextureHelper.IsInternalTexture(texture) ? texture._hardwareTexture : texture;
        const textureCopyView = {
            texture: gpuTexture,
            origin: {
                x: offsetX,
                y: offsetY,
                z: Math.max(faceIndex, 0),
            },
            mipLevel: mipLevel,
            premultipliedAlpha: premultiplyAlpha,
        };
        const textureExtent = {
            width: Math.ceil(width / blockInformation.width) * blockInformation.width,
            height: Math.ceil(height / blockInformation.height) * blockInformation.height,
            depthOrArrayLayers: layers || 1,
        };
        if (imageBitmap.byteLength !== undefined) {
            imageBitmap = imageBitmap; // no-op self-assignment (likely a TypeScript type cast in the original source)
            const bytesPerRow = Math.ceil(width / blockInformation.width) * blockInformation.length;
            const aligned = Math.ceil(bytesPerRow / 256) * 256 === bytesPerRow;
            if (aligned) {
                const commandEncoder = this._device.createCommandEncoder({});
                const buffer = this._bufferManager.createRawBuffer(imageBitmap.byteLength, WebGPUConstants.BufferUsage.MapWrite | WebGPUConstants.BufferUsage.CopySrc, true, "TempBufferForUpdateTexture" + (gpuTexture ? "_" + gpuTexture.label : ""));
                const arrayBuffer = buffer.getMappedRange();
                new Uint8Array(arrayBuffer).set(imageBitmap);
                buffer.unmap();
                commandEncoder.copyBufferToTexture({
                    buffer: buffer,
                    offset: 0,
                    bytesPerRow,
                    rowsPerImage: height,
                }, textureCopyView, textureExtent);
                this._device.queue.submit([commandEncoder.finish()]);
                this._bufferManager.releaseBuffer(buffer);
            }
            else {
                this._device.queue.writeTexture(textureCopyView, imageBitmap, {
                    offset: 0,
                    bytesPerRow,
                    rowsPerImage: height,
                }, textureExtent);
            }
            if (invertY || premultiplyAlpha) {
                if (WebGPUTextureHelper.IsInternalTexture(texture)) {
                    const dontUseRect = offsetX === 0 && offsetY === 0 && width === texture.width && height === texture.height;
                    this.invertYPreMultiplyAlpha(gpuOrHdwTexture, texture.width, texture.height, format, invertY, premultiplyAlpha, faceIndex, mipLevel, layers || 1, offsetX, offsetY, dontUseRect ? 0 : width, dontUseRect ? 0 : height, undefined, allowGPUOptimization);
                }
                else {
                    // we should never take this code path
                    // eslint-disable-next-line no-throw-literal
                    throw "updateTexture: Can't process the texture data because a GPUTexture was provided instead of an InternalTexture!";
                }
            }
        }
        else {
            imageBitmap = imageBitmap; // no-op self-assignment (likely a TypeScript type cast in the original source)
            if (invertY) {
                textureCopyView.premultipliedAlpha = false; // we are going to handle premultiplyAlpha ourselves
                // we must preprocess the image
                if (WebGPUTextureHelper.IsInternalTexture(texture) && offsetX === 0 && offsetY === 0 && width === texture.width && height === texture.height) {
                    // optimization when the source image is the same size as the destination texture and the X/Y offsets are 0:
                    // we simply copy the source to the destination and apply the preprocessing on the destination
                    this._device.queue.copyExternalImageToTexture({ source: imageBitmap }, textureCopyView, textureExtent);
                    this.invertYPreMultiplyAlpha(gpuOrHdwTexture, width, height, format, invertY, premultiplyAlpha, faceIndex, mipLevel, layers || 1, 0, 0, 0, 0, undefined, allowGPUOptimization);
                }
                else {
                    // we must apply the preprocessing on the source image before copying it into the destination texture
                    const commandEncoder = this._device.createCommandEncoder({});
                    // create a temp texture and copy the image to it
                    const srcTexture = this.createTexture({ width, height, layers: 1 }, false, false, false, false, false, format, 1, commandEncoder, WebGPUConstants.TextureUsage.CopySrc | WebGPUConstants.TextureUsage.TextureBinding, undefined, "TempTextureForUpdateTexture");
                    this._deferredReleaseTextures.push([srcTexture, null]);
                    textureExtent.depthOrArrayLayers = 1;
                    this._device.queue.copyExternalImageToTexture({ source: imageBitmap }, { texture: srcTexture }, textureExtent);
                    textureExtent.depthOrArrayLayers = layers || 1;
                    // apply the preprocessing to this temp texture
                    this.invertYPreMultiplyAlpha(srcTexture, width, height, format, invertY, premultiplyAlpha, faceIndex, mipLevel, layers || 1, 0, 0, 0, 0, commandEncoder, allowGPUOptimization);
                    // copy the temp texture to the destination texture
                    commandEncoder.copyTextureToTexture({ texture: srcTexture }, textureCopyView, textureExtent);
                    this._device.queue.submit([commandEncoder.finish()]);
                }
            }
            else {
                // no preprocessing: direct copy to destination texture
                this._device.queue.copyExternalImageToTexture({ source: imageBitmap }, textureCopyView, textureExtent);
            }
        }
    }
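    /**
     * Reads back a rectangle of a texture face / mip level: the data is copied into a temporary
     * MapRead buffer (with a 256-byte-aligned row pitch) and then read through the buffer manager,
     * which also handles type conversion unless noDataConversion is set.
     */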
    readPixels(texture, x, y, width, height, format, faceIndex = 0, mipLevel = 0, buffer = null, noDataConversion = false) {
        const blockInformation = WebGPUTextureHelper.GetBlockInformationFromFormat(format);
        const bytesPerRow = Math.ceil(width / blockInformation.width) * blockInformation.length;
        const bytesPerRowAligned = Math.ceil(bytesPerRow / 256) * 256;
        const size = bytesPerRowAligned * height;
        const gpuBuffer = this._bufferManager.createRawBuffer(size, WebGPUConstants.BufferUsage.MapRead | WebGPUConstants.BufferUsage.CopyDst, undefined, "TempBufferForReadPixels" + (texture.label ? "_" + texture.label : ""));
        const commandEncoder = this._device.createCommandEncoder({});
        commandEncoder.copyTextureToBuffer({
            texture,
            mipLevel,
            origin: {
                x,
                y,
                z: Math.max(faceIndex, 0),
            },
        }, {
            buffer: gpuBuffer,
            offset: 0,
            bytesPerRow: bytesPerRowAligned,
        }, {
            width,
            height,
            depthOrArrayLayers: 1,
        });
        this._device.queue.submit([commandEncoder.finish()]);
        return this._bufferManager.readDataFromBuffer(gpuBuffer, size, width, height, bytesPerRow, bytesPerRowAligned, WebGPUTextureHelper.GetTextureTypeFromFormat(format), 0, buffer, true, noDataConversion);
    }
    //------------------------------------------------------------------------------
    // Dispose
    //------------------------------------------------------------------------------
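    /**
     * Queues a texture (and its irradiance texture, if any) for destruction. The GPU objects are
     * not destroyed immediately because they may still be referenced by the frame being recorded.
     */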
    releaseTexture(texture) {
        if (WebGPUTextureHelper.IsInternalTexture(texture)) {
            const hardwareTexture = texture._hardwareTexture;
            const irradianceTexture = texture._irradianceTexture;
            // We can't destroy the objects right now because they could still be used in the current frame - we defer the destruction to the end of the frame
            this._deferredReleaseTextures.push([hardwareTexture, irradianceTexture]);
        }
        else {
            this._deferredReleaseTextures.push([texture, null]);
        }
    }
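    /**
     * Destroys every texture queued by releaseTexture; typically called once the current frame has
     * been submitted, so the GPU resources are no longer in use.
     */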
    destroyDeferredTextures() {
        for (let i = 0; i < this._deferredReleaseTextures.length; ++i) {
            const [hardwareTexture, irradianceTexture] = this._deferredReleaseTextures[i];
            if (hardwareTexture) {
                if (WebGPUTextureHelper.IsHardwareTexture(hardwareTexture)) {
                    hardwareTexture.release();
                }
                else {
                    hardwareTexture.destroy();
                }
            }
            irradianceTexture?.dispose();
        }
        this._deferredReleaseTextures.length = 0;
    }
}
//# sourceMappingURL=webgpuTextureManager.js.map