summit/frontend/node_modules/@monogrid/gainmap-js/dist/encode.umd.min.cjs

6 lines
22 KiB
JavaScript

/**
* @monogrid/gainmap-js v3.4.0
* With ❤️, by MONOGRID <gainmap@monogrid.com>
*/
!function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports,require("three")):"function"==typeof define&&define.amd?define(["exports","three"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self).encode={},e.three)}(this,function(e,t){"use strict";const n=async e=>{if("undefined"==typeof createImageBitmap)throw new Error("createImageBitmap() not supported.");const{source:t,mimeType:n,quality:r,flipY:a}=e;let o;if((t instanceof Uint8Array||t instanceof Uint8ClampedArray)&&"sourceMimeType"in e)o=new Blob([t],{type:e.sourceMimeType});else{if(!(t instanceof ImageData))throw new Error("Invalid source image");o=t}const i=await createImageBitmap(o),s=i.width,p=i.height;let c;"undefined"!=typeof OffscreenCanvas?c=new OffscreenCanvas(s,p):(c=document.createElement("canvas"),c.width=s,c.height=p);const l=c.getContext("2d");if(!l)throw new Error("Failed to create canvas Context");!0===a&&(l.translate(0,p),l.scale(1,-1)),l.drawImage(i,0,0,s,p);const d=await(async(e,t,n)=>{if("undefined"!=typeof OffscreenCanvas&&e instanceof OffscreenCanvas)return e.convertToBlob({type:t,quality:n});if(e instanceof HTMLCanvasElement)return new Promise((r,a)=>{e.toBlob(e=>{e?r(e):a(new Error("Failed to convert canvas to blob"))},t,n)});throw new Error("Unsupported canvas element")})(c,n,r||.9);return{data:new Uint8Array(await d.arrayBuffer()),mimeType:n,width:s,height:p}},r=e=>{let n;if(e instanceof t.DataTexture){if(!(e.image.data instanceof Uint16Array||e.image.data instanceof Float32Array))throw new Error("Provided image is not HDR");n=e}else n=new t.DataTexture(e.data,e.width,e.height,"format"in e?e.format:t.RGBAFormat,e.type,t.UVMapping,t.RepeatWrapping,t.RepeatWrapping,t.LinearFilter,t.LinearFilter,1,"colorSpace"in e&&"srgb"===e.colorSpace?e.colorSpace:t.LinearSRGBColorSpace),"header"in e&&"gamma"in e&&(n.flipY=!0),n.needsUpdate=!0;return n},a=(e,n,r)=>{let a;switch(e){case t.UnsignedByteType:a=new Uint8ClampedArray(n*r*4);break;case t.HalfFloatType:a=new Uint16Array(n*r*4);break;case t.UnsignedIntType:a=new Uint32Array(n*r*4);break;case t.ByteType:a=new Int8Array(n*r*4);break;case t.ShortType:a=new Int16Array(n*r*4);break;case t.IntType:a=new Int32Array(n*r*4);break;case t.FloatType:a=new Float32Array(n*r*4);break;default:throw new Error("Unsupported data type")}return a};let o;class i{_renderer;_rendererIsDisposable=!1;_material;_scene;_camera;_quad;_renderTarget;_width;_height;_type;_colorSpace;_supportsReadPixels=!0;constructor(e){this._width=e.width,this._height=e.height,this._type=e.type,this._colorSpace=e.colorSpace;const n={format:t.RGBAFormat,depthBuffer:!1,stencilBuffer:!1,type:this._type,colorSpace:this._colorSpace,anisotropy:void 0!==e.renderTargetOptions?.anisotropy?e.renderTargetOptions?.anisotropy:1,generateMipmaps:void 0!==e.renderTargetOptions?.generateMipmaps&&e.renderTargetOptions?.generateMipmaps,magFilter:void 0!==e.renderTargetOptions?.magFilter?e.renderTargetOptions?.magFilter:t.LinearFilter,minFilter:void 0!==e.renderTargetOptions?.minFilter?e.renderTargetOptions?.minFilter:t.LinearFilter,samples:void 0!==e.renderTargetOptions?.samples?e.renderTargetOptions?.samples:void 0,wrapS:void 0!==e.renderTargetOptions?.wrapS?e.renderTargetOptions?.wrapS:t.ClampToEdgeWrapping,wrapT:void 0!==e.renderTargetOptions?.wrapT?e.renderTargetOptions?.wrapT:t.ClampToEdgeWrapping};if(this._material=e.material,e.renderer?this._renderer=e.renderer:(this._renderer=i.instantiateRenderer(),this._rendererIsDisposable=!0),this._scene=new t.Scene,this._camera=new t.OrthographicCamera,this._camera.position.set(0,0,10),this._camera.left=-.5,this._camera.right=.5,this._camera.top=.5,this._camera.bottom=-.5,this._camera.updateProjectionMatrix(),!((e,n,r,i)=>{if(void 0!==o)return o;const s=new t.WebGLRenderTarget(1,1,i);n.setRenderTarget(s);const p=new t.Mesh(new t.PlaneGeometry,new t.MeshBasicMaterial({color:16777215}));n.render(p,r),n.setRenderTarget(null);const c=a(e,s.width,s.height);return n.readRenderTargetPixels(s,0,0,s.width,s.height,c),s.dispose(),p.geometry.dispose(),p.material.dispose(),o=0!==c[0],o})(this._type,this._renderer,this._camera,n)){let e;if(this._type===t.HalfFloatType)e=this._renderer.extensions.has("EXT_color_buffer_float")?t.FloatType:void 0;void 0!==e?(console.warn(`This browser does not support reading pixels from ${this._type} RenderTargets, switching to ${t.FloatType}`),this._type=e):(this._supportsReadPixels=!1,console.warn("This browser dos not support toArray or toDataTexture, calls to those methods will result in an error thrown"))}this._quad=new t.Mesh(new t.PlaneGeometry,this._material),this._quad.geometry.computeBoundingBox(),this._scene.add(this._quad),this._renderTarget=new t.WebGLRenderTarget(this.width,this.height,n),this._renderTarget.texture.mapping=void 0!==e.renderTargetOptions?.mapping?e.renderTargetOptions?.mapping:t.UVMapping}static instantiateRenderer(){const e=new t.WebGLRenderer;return e.setSize(128,128),e}render=()=>{this._renderer.setRenderTarget(this._renderTarget);try{this._renderer.render(this._scene,this._camera)}catch(e){throw this._renderer.setRenderTarget(null),e}this._renderer.setRenderTarget(null)};toArray(){if(!this._supportsReadPixels)throw new Error("Can't read pixels in this browser");const e=a(this._type,this._width,this._height);return this._renderer.readRenderTargetPixels(this._renderTarget,0,0,this._width,this._height,e),e}toDataTexture(e){const n=new t.DataTexture(this.toArray(),this.width,this.height,t.RGBAFormat,this._type,e?.mapping||t.UVMapping,e?.wrapS||t.ClampToEdgeWrapping,e?.wrapT||t.ClampToEdgeWrapping,e?.magFilter||t.LinearFilter,e?.minFilter||t.LinearFilter,e?.anisotropy||1,t.LinearSRGBColorSpace);return n.generateMipmaps=void 0!==e?.generateMipmaps&&e?.generateMipmaps,n}disposeOnDemandRenderer(){this._renderer.setRenderTarget(null),this._rendererIsDisposable&&(this._renderer.dispose(),this._renderer.forceContextLoss())}dispose(e){this.disposeOnDemandRenderer(),e&&this.renderTarget.dispose(),this.material instanceof t.ShaderMaterial&&Object.values(this.material.uniforms).forEach(e=>{e.value instanceof t.Texture&&e.value.dispose()}),Object.values(this.material).forEach(e=>{e instanceof t.Texture&&e.dispose()}),this.material.dispose(),this._quad.geometry.dispose()}get width(){return this._width}set width(e){this._width=e,this._renderTarget.setSize(this._width,this._height)}get height(){return this._height}set height(e){this._height=e,this._renderTarget.setSize(this._width,this._height)}get renderer(){return this._renderer}get renderTarget(){return this._renderTarget}set renderTarget(e){this._renderTarget=e,this._width=e.width,this._height=e.height}get material(){return this._material}get type(){return this._type}get colorSpace(){return this._colorSpace}}class s extends t.ShaderMaterial{_minContentBoost;_maxContentBoost;_offsetSdr;_offsetHdr;_gamma;constructor({sdr:e,hdr:n,offsetSdr:r,offsetHdr:a,maxContentBoost:o,minContentBoost:i,gamma:s}){if(!o)throw new Error("maxContentBoost is required");if(!e)throw new Error("sdr is required");if(!n)throw new Error("hdr is required");const p=s||[1,1,1],c=r||[1/64,1/64,1/64],l=a||[1/64,1/64,1/64],d=i||1,m=Math.max(o,1.0001);super({name:"GainMapEncoderMaterial",vertexShader:"\nvarying vec2 vUv;\n\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n",fragmentShader:"\n#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\nuniform sampler2D sdr;\nuniform sampler2D hdr;\nuniform vec3 gamma;\nuniform vec3 offsetSdr;\nuniform vec3 offsetHdr;\nuniform float minLog2;\nuniform float maxLog2;\n\nvarying vec2 vUv;\n\nvoid main() {\n vec3 sdrColor = texture2D(sdr, vUv).rgb;\n vec3 hdrColor = texture2D(hdr, vUv).rgb;\n\n vec3 pixelGain = (hdrColor + offsetHdr) / (sdrColor + offsetSdr);\n vec3 logRecovery = (log2(pixelGain) - minLog2) / (maxLog2 - minLog2);\n vec3 clampedRecovery = saturate(logRecovery);\n gl_FragColor = vec4(pow(clampedRecovery, gamma), 1.0);\n}\n",uniforms:{sdr:{value:e},hdr:{value:n},gamma:{value:(new t.Vector3).fromArray(p)},offsetSdr:{value:(new t.Vector3).fromArray(c)},offsetHdr:{value:(new t.Vector3).fromArray(l)},minLog2:{value:Math.log2(d)},maxLog2:{value:Math.log2(m)}},blending:t.NoBlending,depthTest:!1,depthWrite:!1}),this._minContentBoost=d,this._maxContentBoost=m,this._offsetSdr=c,this._offsetHdr=l,this._gamma=p,this.needsUpdate=!0,this.uniformsNeedUpdate=!0}get gamma(){return this._gamma}set gamma(e){this._gamma=e,this.uniforms.gamma.value=(new t.Vector3).fromArray(e)}get offsetHdr(){return this._offsetHdr}set offsetHdr(e){this._offsetHdr=e,this.uniforms.offsetHdr.value=(new t.Vector3).fromArray(e)}get offsetSdr(){return this._offsetSdr}set offsetSdr(e){this._offsetSdr=e,this.uniforms.offsetSdr.value=(new t.Vector3).fromArray(e)}get minContentBoost(){return this._minContentBoost}set minContentBoost(e){this._minContentBoost=e,this.uniforms.minLog2.value=Math.log2(e)}get maxContentBoost(){return this._maxContentBoost}set maxContentBoost(e){this._maxContentBoost=e,this.uniforms.maxLog2.value=Math.log2(e)}get gainMapMin(){return[Math.log2(this._minContentBoost),Math.log2(this._minContentBoost),Math.log2(this._minContentBoost)]}get gainMapMax(){return[Math.log2(this._maxContentBoost),Math.log2(this._maxContentBoost),Math.log2(this._maxContentBoost)]}get hdrCapacityMin(){return Math.min(Math.max(0,this.gainMapMin[0]),Math.max(0,this.gainMapMin[1]),Math.max(0,this.gainMapMin[2]))}get hdrCapacityMax(){return Math.max(Math.max(0,this.gainMapMax[0]),Math.max(0,this.gainMapMax[1]),Math.max(0,this.gainMapMax[2]))}}const p=e=>{const{image:n,sdr:a,renderer:o}=e,p=r(n),c=new s({...e,sdr:a.renderTarget.texture,hdr:p}),l=new i({width:p.image.width,height:p.image.height,type:t.UnsignedByteType,colorSpace:t.LinearSRGBColorSpace,material:c,renderer:o,renderTargetOptions:e.renderTargetOptions});try{l.render()}catch(e){throw l.disposeOnDemandRenderer(),e}return l};class c extends t.ShaderMaterial{_brightness=0;_contrast=1;_saturation=1;_exposure=1;_toneMapping;_map;constructor({map:e,toneMapping:n}){super({name:"SDRMaterial",vertexShader:"\nvarying vec2 vUv;\n\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n",fragmentShader:"\n#ifndef saturate\n#define saturate( a ) clamp( a, 0.0, 1.0 )\n#endif\n\nuniform sampler2D map;\nuniform float brightness;\nuniform float contrast;\nuniform float saturation;\nuniform float exposure;\n\nvarying vec2 vUv;\n\nmat4 brightnessMatrix( float brightness ) {\n return mat4(\n 1, 0, 0, 0,\n 0, 1, 0, 0,\n 0, 0, 1, 0,\n brightness, brightness, brightness, 1\n );\n}\n\nmat4 contrastMatrix( float contrast ) {\n float t = ( 1.0 - contrast ) / 2.0;\n return mat4(\n contrast, 0, 0, 0,\n 0, contrast, 0, 0,\n 0, 0, contrast, 0,\n t, t, t, 1\n );\n}\n\nmat4 saturationMatrix( float saturation ) {\n vec3 luminance = vec3( 0.3086, 0.6094, 0.0820 );\n float oneMinusSat = 1.0 - saturation;\n vec3 red = vec3( luminance.x * oneMinusSat );\n red+= vec3( saturation, 0, 0 );\n vec3 green = vec3( luminance.y * oneMinusSat );\n green += vec3( 0, saturation, 0 );\n vec3 blue = vec3( luminance.z * oneMinusSat );\n blue += vec3( 0, 0, saturation );\n return mat4(\n red, 0,\n green, 0,\n blue, 0,\n 0, 0, 0, 1\n );\n}\n\nvec3 RRTAndODTFit( vec3 v ) {\n vec3 a = v * ( v + 0.0245786 ) - 0.000090537;\n vec3 b = v * ( 0.983729 * v + 0.4329510 ) + 0.238081;\n return a / b;\n}\n\nvec3 ACESFilmicToneMapping( vec3 color ) {\n // sRGB => XYZ => D65_2_D60 => AP1 => RRT_SAT\n const mat3 ACESInputMat = mat3(\n vec3( 0.59719, 0.07600, 0.02840 ), // transposed from source\n vec3( 0.35458, 0.90834, 0.13383 ),\n vec3( 0.04823, 0.01566, 0.83777 )\n );\n // ODT_SAT => XYZ => D60_2_D65 => sRGB\n const mat3 ACESOutputMat = mat3(\n vec3( 1.60475, -0.10208, -0.00327 ), // transposed from source\n vec3( -0.53108, 1.10813, -0.07276 ),\n vec3( -0.07367, -0.00605, 1.07602 )\n );\n color = ACESInputMat * color;\n // Apply RRT and ODT\n color = RRTAndODTFit( color );\n color = ACESOutputMat * color;\n // Clamp to [0, 1]\n return saturate( color );\n}\n\n// source: https://www.cs.utah.edu/docs/techreports/2002/pdf/UUCS-02-001.pdf\nvec3 ReinhardToneMapping( vec3 color ) {\n return saturate( color / ( vec3( 1.0 ) + color ) );\n}\n\n// source: http://filmicworlds.com/blog/filmic-tonemapping-operators/\nvec3 CineonToneMapping( vec3 color ) {\n // optimized filmic operator by Jim Hejl and Richard Burgess-Dawson\n color = max( vec3( 0.0 ), color - 0.004 );\n return pow( ( color * ( 6.2 * color + 0.5 ) ) / ( color * ( 6.2 * color + 1.7 ) + 0.06 ), vec3( 2.2 ) );\n}\n\n// nothing\nvec3 LinearToneMapping ( vec3 color ) {\n return color;\n}\n\n// Matrices for rec 2020 <> rec 709 color space conversion\n// matrix provided in row-major order so it has been transposed\n// https://www.itu.int/pub/R-REP-BT.2407-2017\nconst mat3 LINEAR_REC2020_TO_LINEAR_SRGB = mat3(\n vec3( 1.6605, - 0.1246, - 0.0182 ),\n vec3( - 0.5876, 1.1329, - 0.1006 ),\n vec3( - 0.0728, - 0.0083, 1.1187 )\n);\n\nconst mat3 LINEAR_SRGB_TO_LINEAR_REC2020 = mat3(\n vec3( 0.6274, 0.0691, 0.0164 ),\n vec3( 0.3293, 0.9195, 0.0880 ),\n vec3( 0.0433, 0.0113, 0.8956 )\n);\n\n// https://iolite-engine.com/blog_posts/minimal_agx_implementation\n// Mean error^2: 3.6705141e-06\nvec3 agxDefaultContrastApprox( vec3 x ) {\n vec3 x2 = x * x;\n vec3 x4 = x2 * x2;\n return + 15.5 * x4 * x2\n - 40.14 * x4 * x\n + 31.96 * x4\n - 6.868 * x2 * x\n + 0.4298 * x2\n + 0.1191 * x\n - 0.00232;\n}\n\n// AgX Tone Mapping implementation based on Filament, which in turn is based\n// on Blender's implementation using rec 2020 primaries\n// https://github.com/google/filament/pull/7236\n// Inputs and outputs are encoded as Linear-sRGB.\n\nvec3 AgXToneMapping( vec3 color ) {\n\n // AgX constants\n const mat3 AgXInsetMatrix = mat3(\n vec3( 0.856627153315983, 0.137318972929847, 0.11189821299995 ),\n vec3( 0.0951212405381588, 0.761241990602591, 0.0767994186031903 ),\n vec3( 0.0482516061458583, 0.101439036467562, 0.811302368396859 )\n );\n\n // explicit AgXOutsetMatrix generated from Filaments AgXOutsetMatrixInv\n const mat3 AgXOutsetMatrix = mat3(\n vec3( 1.1271005818144368, - 0.1413297634984383, - 0.14132976349843826 ),\n vec3( - 0.11060664309660323, 1.157823702216272, - 0.11060664309660294 ),\n vec3( - 0.016493938717834573, - 0.016493938717834257, 1.2519364065950405 )\n );\n\n // LOG2_MIN = -10.0\n // LOG2_MAX = +6.5\n // MIDDLE_GRAY = 0.18\n const float AgxMinEv = - 12.47393; // log2( pow( 2, LOG2_MIN ) * MIDDLE_GRAY )\n const float AgxMaxEv = 4.026069; // log2( pow( 2, LOG2_MAX ) * MIDDLE_GRAY )\n\n color = LINEAR_SRGB_TO_LINEAR_REC2020 * color;\n\n color = AgXInsetMatrix * color;\n\n // Log2 encoding\n color = max( color, 1e-10 ); // avoid 0 or negative numbers for log2\n color = log2( color );\n color = ( color - AgxMinEv ) / ( AgxMaxEv - AgxMinEv );\n\n color = clamp( color, 0.0, 1.0 );\n\n // Apply sigmoid\n color = agxDefaultContrastApprox( color );\n\n // Apply AgX look\n // v = agxLook(v, look);\n\n color = AgXOutsetMatrix * color;\n\n // Linearize\n color = pow( max( vec3( 0.0 ), color ), vec3( 2.2 ) );\n\n color = LINEAR_REC2020_TO_LINEAR_SRGB * color;\n\n // Gamut mapping. Simple clamp for now.\n color = clamp( color, 0.0, 1.0 );\n\n return color;\n\n}\n\n// https://modelviewer.dev/examples/tone-mapping\n\nvec3 NeutralToneMapping( vec3 color ) {\n\n const float StartCompression = 0.8 - 0.04;\n const float Desaturation = 0.15;\n\n float x = min( color.r, min( color.g, color.b ) );\n\n float offset = x < 0.08 ? x - 6.25 * x * x : 0.04;\n\n color -= offset;\n\n float peak = max( color.r, max( color.g, color.b ) );\n\n if ( peak < StartCompression ) return color;\n\n float d = 1. - StartCompression;\n\n float newPeak = 1. - d * d / ( peak + d - StartCompression );\n\n color *= newPeak / peak;\n\n float g = 1. - 1. / ( Desaturation * ( peak - newPeak ) + 1. );\n\n return mix( color, vec3( newPeak ), g );\n\n}\n\n\n\nvoid main() {\n vec4 color = texture2D(map, vUv);\n\n vec4 exposed = vec4(exposure * color.rgb, color.a);\n\n vec4 tonemapped = vec4(TONEMAPPING_FUNCTION(exposed.rgb), color.a);\n\n vec4 adjusted =\n brightnessMatrix( brightness ) *\n contrastMatrix( contrast ) *\n saturationMatrix( saturation ) *\n tonemapped;\n\n gl_FragColor = adjusted;\n}\n",uniforms:{map:{value:e},brightness:{value:0},contrast:{value:1},saturation:{value:1},exposure:{value:1}},blending:t.NoBlending,depthTest:!1,depthWrite:!1}),this._map=e,this.toneMapping=this._toneMapping=n||t.ACESFilmicToneMapping,this.needsUpdate=!0,this.uniformsNeedUpdate=!0}get toneMapping(){return this._toneMapping}set toneMapping(e){let n=!1;switch(e){case t.ACESFilmicToneMapping:this.defines.TONEMAPPING_FUNCTION="ACESFilmicToneMapping",n=!0;break;case t.ReinhardToneMapping:this.defines.TONEMAPPING_FUNCTION="ReinhardToneMapping",n=!0;break;case t.CineonToneMapping:this.defines.TONEMAPPING_FUNCTION="CineonToneMapping",n=!0;break;case t.LinearToneMapping:this.defines.TONEMAPPING_FUNCTION="LinearToneMapping",n=!0;break;case t.AgXToneMapping:this.defines.TONEMAPPING_FUNCTION="AgXToneMapping",n=!0;break;case t.NeutralToneMapping:this.defines.TONEMAPPING_FUNCTION="NeutralToneMapping",n=!0;break;default:console.error(`Unsupported toneMapping: ${e}. Using LinearToneMapping.`),this.defines.TONEMAPPING_FUNCTION="LinearToneMapping",this._toneMapping=t.LinearToneMapping}n&&(this._toneMapping=e),this.needsUpdate=!0}get brightness(){return this._brightness}set brightness(e){this._brightness=e,this.uniforms.brightness.value=e}get contrast(){return this._contrast}set contrast(e){this._contrast=e,this.uniforms.contrast.value=e}get saturation(){return this._saturation}set saturation(e){this._saturation=e,this.uniforms.saturation.value=e}get exposure(){return this._exposure}set exposure(e){this._exposure=e,this.uniforms.exposure.value=e}get map(){return this._map}set map(e){this._map=e,this.uniforms.map.value=e}}const l=(e,n,r,a)=>{e.needsUpdate=!0;const o=new i({width:e.image.width,height:e.image.height,type:t.UnsignedByteType,colorSpace:t.SRGBColorSpace,material:new c({map:e,toneMapping:r}),renderer:n,renderTargetOptions:a});try{o.render()}catch(e){throw o.disposeOnDemandRenderer(),e}return o},d=e=>{const{image:t,renderer:n}=e,a=r(t),o=l(a,n,e.toneMapping,e.renderTargetOptions),i=p({...e,image:a,sdr:o,renderer:o.renderer});return{sdr:o,gainMap:i,hdr:a,getMetadata:()=>({gainMapMax:i.material.gainMapMax,gainMapMin:i.material.gainMapMin,gamma:i.material.gamma,hdrCapacityMax:i.material.hdrCapacityMax,hdrCapacityMin:i.material.hdrCapacityMin,offsetHdr:i.material.offsetHdr,offsetSdr:i.material.offsetSdr})}};e.GainMapEncoderMaterial=s,e.SDRMaterial=c,e.compress=n,e.encode=d,e.encodeAndCompress=async e=>{const t=d(e),{mimeType:r,quality:a,flipY:o,withWorker:i}=e;let s,p,c;const l=new ImageData(t.sdr.toArray(),t.sdr.width,t.sdr.height),m=new ImageData(t.gainMap.toArray(),t.gainMap.width,t.gainMap.height);if(i){const e=await Promise.all([i.compress({source:l,mimeType:r,quality:a,flipY:o}),i.compress({source:m,mimeType:r,quality:a,flipY:o})]);s=e,p=e[0].source,c=e[1].source}else s=await Promise.all([n({source:l,mimeType:r,quality:a,flipY:o}),n({source:m,mimeType:r,quality:a,flipY:o})]),p=l.data,c=m.data;return t.sdr.dispose(),t.gainMap.dispose(),{...t,...t.getMetadata(),sdr:s[0],gainMap:s[1],rawSDR:p,rawGainMap:c}},e.findTextureMinMax=(e,n="max",a)=>{const o=r(e);if(o.format!==t.AlphaFormat&&o.format!==t.RGBFormat&&o.format!==t.RGBAFormat&&o.format!==t.DepthFormat&&o.format!==t.DepthStencilFormat&&o.format!==t.RedFormat&&o.format!==t.RedIntegerFormat&&o.format!==t.RGFormat&&o.format!==t.RGIntegerFormat&&o.format!==t.RGBIntegerFormat&&o.format!==t.RGBAIntegerFormat)throw new Error("Unsupported texture format");const s=new t.ShaderMaterial({vertexShader:"\nvarying vec2 vUv;\nvoid main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4(position, 1.0);\n}\n",fragmentShader:"\nprecision mediump float;\n\n#ifndef CELL_SIZE\n #define CELL_SIZE 2\n#endif\n\n#ifndef COMPARE_FUNCTION\n #define COMPARE_FUNCTION max\n#endif\n\n#ifndef INITIAL_VALUE\n #define INITIAL_VALUE 0\n#endif\n\nuniform sampler2D map;\nuniform vec2 u_srcResolution;\n\nvarying vec2 vUv;\n\nvoid main() {\n // compute the first pixel the source cell\n vec2 srcPixel = floor(gl_FragCoord.xy) * float(CELL_SIZE);\n\n // one pixel in source\n vec2 onePixel = vec2(1) / u_srcResolution;\n\n // uv for first pixel in cell. +0.5 for center of pixel\n vec2 uv = (srcPixel + 0.5) * onePixel;\n\n vec4 resultColor = vec4(INITIAL_VALUE);\n\n for (int y = 0; y < CELL_SIZE; ++y) {\n for (int x = 0; x < CELL_SIZE; ++x) {\n resultColor = COMPARE_FUNCTION(resultColor, texture2D(map, uv + vec2(x, y) * onePixel));\n }\n }\n\n gl_FragColor = resultColor;\n}\n",uniforms:{u_srcResolution:{value:new t.Vector2(o.image.width,o.image.height)},map:{value:o}},defines:{CELL_SIZE:2,COMPARE_FUNCTION:n,INITIAL_VALUE:"max"===n?0:65504}});o.needsUpdate=!0,s.needsUpdate=!0;let p=o.image.width,c=o.image.height;const l=new i({width:p,height:c,type:o.type,colorSpace:o.colorSpace,material:s,renderer:a}),d=[];for(;p>1||c>1;){p=Math.max(1,(p+2-1)/2|0),c=Math.max(1,(c+2-1)/2|0);const e=new t.WebGLRenderTarget(p,c,{type:l.type,format:o.format,colorSpace:l.colorSpace,minFilter:t.NearestFilter,magFilter:t.NearestFilter,wrapS:t.ClampToEdgeWrapping,wrapT:t.ClampToEdgeWrapping,generateMipmaps:!1,depthBuffer:!1,stencilBuffer:!1});d.push(e)}p=o.image.width,c=o.image.height,d.forEach(e=>{p=Math.max(1,(p+2-1)/2|0),c=Math.max(1,(c+2-1)/2|0),l.renderTarget=e,l.render(),s.uniforms.map.value=e.texture,s.uniforms.u_srcResolution.value.x=p,s.uniforms.u_srcResolution.value.y=c});const m=l.toArray();return l.dispose(),d.forEach(e=>e.dispose()),[l.type===t.FloatType?m[0]:t.DataUtils.fromHalfFloat(m[0]),l.type===t.FloatType?m[1]:t.DataUtils.fromHalfFloat(m[1]),l.type===t.FloatType?m[2]:t.DataUtils.fromHalfFloat(m[2])]},e.getGainMap=p,e.getSDRRendition=l});