, `keyupbasic textured quad (nearest neighbour) - CodeSandbox, `keybasic textured quad (nearest neighbour) - CodeSandbox, `mousemovebasic textured quad (nearest neighbour) - CodeSandbox, `mousemovenormalisedbasic textured quad (nearest neighbour) - CodeSandbox and `mousemovedeltabasic textured quad (nearest neighbour) - CodeSandbox.\n\n```tsx\nconst angle = useObservable(0)\n \n\u002F\u002F ...\n\nuseEffect(() =\u003E {\n const s = combineLatest([\n mousemovenormalised$.pipe(\n sampleTime(100),\n map(([x]) =\u003E Math.PI * 2 * Math.sin(x * Math.PI)),\n ),\n interval(100),\n ])\n .pipe(map(([angle, time]) =\u003E angle + time \u002F 5.0))\n .subscribe(angle.set)\n return () =\u003E s.unsubscribe()\n}, [angle])\n```\n","id":"mod_3suPsKB36QC5Vuhci7ZooM","is_binary":false,"title":"README.md","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkcH6sHiWK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fdither_matrix.png","id":"mod_GbSEidrsuZVRG2oUWmKyK9","is_binary":true,"title":"dither_matrix.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rJJMBpjHoZt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_rising_sun.png","id":"mod_Ao8dMhvpfX5kQUCQ692Le4","is_binary":true,"title":"palette_rising_sun.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkxMSpsHs-Y","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_moonlight.png","id":"mod_Ljgt3Yyi8PRX1y9dGPPYLV","is_binary":true,"title":"palette_moonlight.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkbzS6oSobt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fblue_noise.png","id":"mod_JgeVJzddWoSfhQt8vvyf8x","is_binary":true,"title":"blue_noise.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HkMGHasrjZF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fblade_diffuse.jpg","id":"mod_JXS1sK86e9YgJ4WbwEuW6n","is_binary":true,"title":"blade_diffuse.jpg","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ry7GH6iribK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless7.png","id":"mod_Xa8hYEaKmNsZRy259EXC1r","is_binary":true,"title":"seamless7.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"B1VGHTjBoZt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_twilight.png","id":"mod_79paFvYcMFHYUsQ6pfvvEC","is_binary":true,"title":"palette_twilight.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"B1SGS6oriWF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fblade_alpha.jpg","id":"mod_8Zwg78u563VRfnDFbGDyCQ","is_binary":true,"title":"blade_alpha.jpg","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BJIzBTjrs-Y","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fwater-0397b.jpeg","id":"mod_PbKug8YG8SC3wqJpe7cAaH","is_binary":true,"title":"water-0397b.jpeg","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyPGB6jrs-t","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless6.png","id":"mod_FQAzGgzx8evampg5e6BJKN","is_binary":true,"title":"seamless6.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ByOzrpirjZt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless8.png","id":"mod_M754ED7ZXyGZ1nQ2yr6zvP","is_binary":true,"title":"seamless8.png","sha":null,"inserted_at":"2021-11-05T04:22:30","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkFfH6jrsbt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless5.png","id":"mod_BF8RJX2JDMRrEXxrSP3utH","is_binary":true,"title":"seamless5.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BJcGHasSj-Y","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"{\n \"extends\": \"react-app\",\n \"rules\": {\n \"semi\": [2, \"never\"],\n \"no-unexpected-multiline\": \"error\"\n }\n }","id":"mod_Qoq3frN8KWySpJbGcRVSDA","is_binary":false,"title":".eslintrc","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"Bk8SaiSoWK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"node_modules\nyarn.lock\n","id":"mod_Vdpztvs8tAw2jkDswEFns3","is_binary":false,"title":".gitignore","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkDS6iBiWF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"{\n \"printWidth\": 160,\n \"tabWidth\": 2,\n \"useTabs\": false,\n \"semi\": false,\n \"singleQuote\": false,\n \"trailingComma\": \"all\",\n \"bracketSpacing\": true,\n \"jsxBracketSameLine\": true,\n \"fluid\": false\n}","id":"mod_uWzSiyKE4t1PE1LdeWiCE","is_binary":false,"title":".prettierrc","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"r1OHTjri-K","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"import * as THREE from \"three\"\nimport { shaderMaterial } from \"@react-three\u002Fdrei\"\nimport { extend } from \"@react-three\u002Ffiber\"\n\nconst ShinyMaterial = shaderMaterial(\n { baseSpeed: 0.05, noiseTexture: null, noiseScale: 0.5337, alpha: 1.0, time: 10.0 },\n `\n varying vec2 vUv;\n void main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n }\n `,\n `\n uniform sampler2D baseTexture;\n uniform float baseSpeed;\n uniform sampler2D noiseTexture;\n uniform float noiseScale;\n uniform float alpha;\n uniform float time;\n uniform vec2 resolution;\n\n varying vec2 vUv;\n void main()\n {\n vec2 uvTimeShift = vUv + vec2( -0.3, 1.5 ) * time * baseSpeed;\n vec4 noiseGeneratorTimeShift = texture2D( noiseTexture, uvTimeShift );\n vec2 uvNoiseTimeShift = vUv + noiseScale * vec2( noiseGeneratorTimeShift.r, noiseGeneratorTimeShift.b );\n vec4 baseColor = texture2D( baseTexture, uvNoiseTimeShift );\n\n baseColor.a = alpha;\n gl_FragColor = baseColor;\n }\n `,\n (self) =\u003E {\n self.side = THREE.DoubleSide\n },\n)\n\nextend({ ShinyMaterial })\n","id":"mod_AWg3u4bjMBLtYNZZtXmF8x","is_binary":false,"title":"ShinyMaterial.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HJIlBpoBoWt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"ByZHpsHjbt"},{"code":"\u002F\u002F\u002F \u003Creference types=\"react-scripts\" \u002F\u003E\n","id":"mod_7MRqULLZ3Q2wbuYJnJ915g","is_binary":false,"title":"react-app-env.d.ts","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HkPlr6orsWK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"import { Mesh, OrthographicCamera, PlaneBufferGeometry, Scene, ShaderMaterial, UniformsUtils, Vector2 } from \"three\"\nimport { DIMENSIONS } from \"..\u002Fconst\"\n\nexport const CrtShader = {\n uniforms: {\n tDiffuse: { value: null },\n iResolution: {\n type: \"vec2\",\n value: new Vector2(DIMENSIONS.width - 3, DIMENSIONS.height - 2),\n },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n \u002F\u002F #include \u003Ccommon\u003E\n\t\tuniform sampler2D tDiffuse;\n\t\tvarying vec2 vUv;\n uniform vec2 iResolution;\n\n \u002F\u002F\n\u002F\u002F PUBLIC DOMAIN CRT STYLED SCAN-LINE SHADER\n\u002F\u002F\n\u002F\u002F by Timothy Lottes\n\u002F\u002F\n\u002F\u002F This is more along the style of a really good CGA arcade monitor.\n\u002F\u002F With RGB inputs instead of NTSC.\n\u002F\u002F The shadow mask example has the mask rotated 90 degrees for less chromatic aberration.\n\u002F\u002F\n\u002F\u002F Left it unoptimized to show the theory behind the algorithm.\n\u002F\u002F\n\u002F\u002F It is an example what I personally would want as a display option for pixel art games.\n\u002F\u002F Please take and use, change, or whatever.\n\u002F\u002F\n\n\u002F\u002F Emulated input resolution.\n#if 0\n \u002F\u002F Fix resolution to set amount.\n #define res (vec2(320.0\u002F1.0,160.0\u002F1.0))\n#else\n \u002F\u002F Optimize for resize.\n #define res (iResolution.xy\u002F6.0)\n#endif\n\n\u002F\u002F Hardness of scanline.\n\u002F\u002F -8.0 = soft\n\u002F\u002F -16.0 = medium\nfloat hardScan=-8.0;\n\n\u002F\u002F Hardness of pixels in scanline.\n\u002F\u002F -2.0 = soft\n\u002F\u002F -4.0 = hard\nfloat hardPix=-3.0;\n\n\u002F\u002F Display warp.\n\u002F\u002F 0.0 = none\n\u002F\u002F 1.0\u002F8.0 = extreme\nvec2 warp=vec2(1.0\u002F32.0,1.0\u002F24.0); \n\n\u002F\u002F Amount of shadow mask.\nfloat maskDark=0.5;\nfloat maskLight=1.5;\n\n\u002F\u002F------------------------------------------------------------------------\n\n\u002F\u002F sRGB to Linear.\n\u002F\u002F Assuing using sRGB typed textures this should not be needed.\nfloat ToLinear1(float c){return(c\u003C=0.04045)?c\u002F12.92:pow((c+0.055)\u002F1.055,2.4);}\nvec3 ToLinear(vec3 c){return vec3(ToLinear1(c.r),ToLinear1(c.g),ToLinear1(c.b));}\n\n\u002F\u002F Linear to sRGB.\n\u002F\u002F Assuing using sRGB typed textures this should not be needed.\nfloat ToSrgb1(float c){return(c\u003C0.0031308?c*12.92:1.055*pow(c,0.41666)-0.055);}\nvec3 ToSrgb(vec3 c){return vec3(ToSrgb1(c.r),ToSrgb1(c.g),ToSrgb1(c.b));}\n\n\u002F\u002F Nearest emulated sample given floating point position and texel offset.\n\u002F\u002F Also zero's off screen.\nvec3 Fetch(vec2 pos,vec2 off){\n pos=floor(pos*res+off)\u002Fres;\n if(max(abs(pos.x-0.5),abs(pos.y-0.5))\u003E0.5)return vec3(0.0,0.0,0.0);\n return ToLinear(texture2D(tDiffuse,pos.xy,-16.0).rgb);}\n\n\u002F\u002F Distance in emulated pixels to nearest texel.\nvec2 Dist(vec2 pos){pos=pos*res;return -((pos-floor(pos))-vec2(0.5));}\n \n\u002F\u002F 1D Gaussian.\nfloat Gaus(float pos,float scale){return exp2(scale*pos*pos);}\n\n\u002F\u002F 3-tap Gaussian filter along horz line.\nvec3 Horz3(vec2 pos,float off){\n vec3 b=Fetch(pos,vec2(-1.0,off));\n vec3 c=Fetch(pos,vec2( 0.0,off));\n vec3 d=Fetch(pos,vec2( 1.0,off));\n float dst=Dist(pos).x;\n \u002F\u002F Convert distance to weight.\n float scale=hardPix;\n float wb=Gaus(dst-1.0,scale);\n float wc=Gaus(dst+0.0,scale);\n float wd=Gaus(dst+1.0,scale);\n \u002F\u002F Return filtered sample.\n return (d*wd)\u002F(wb+wc+wd);}\n\n\u002F\u002F 5-tap Gaussian filter along horz line.\nvec3 Horz5(vec2 pos,float off){\n vec3 a=Fetch(pos,vec2(-2.0,off));\n vec3 b=Fetch(pos,vec2(-1.0,off));\n vec3 c=Fetch(pos,vec2( 0.0,off));\n vec3 d=Fetch(pos,vec2( 1.0,off));\n vec3 e=Fetch(pos,vec2( 2.0,off));\n float dst=Dist(pos).x;\n \u002F\u002F Convert distance to weight.\n float scale=hardPix;\n float wa=Gaus(dst-2.0,scale);\n float wb=Gaus(dst-1.0,scale);\n float wc=Gaus(dst+0.0,scale);\n float wd=Gaus(dst+1.0,scale);\n float we=Gaus(dst+2.0,scale);\n \u002F\u002F Return filtered sample.\n return (a*wa+b*wb+c*wc+d*wd+e*we)\u002F(wa+wb+wc+wd+we);}\n\n\u002F\u002F Return scanline weight.\nfloat Scan(vec2 pos,float off){\n float dst=Dist(pos).y;\n return Gaus(dst+off,hardScan);}\n\n\u002F\u002F Allow nearest three lines to effect pixel.\nvec3 Tri(vec2 pos){\n vec3 a=Horz3(pos,-1.0);\n vec3 b=Horz5(pos, 0.0);\n vec3 c=Horz3(pos, 1.0);\n float wa=Scan(pos,-1.0);\n float wb=Scan(pos, 0.0);\n float wc=Scan(pos, 1.0);\n return a*wa+b*wb+c*wc;}\n\n\u002F\u002F Distortion of scanlines, and end of screen alpha.\nvec2 Warp(vec2 pos){\n pos=pos*2.0-1.0; \n pos*=vec2(1.0+(pos.y*pos.y)*warp.x,1.0+(pos.x*pos.x)*warp.y);\n return pos*0.5+0.5;}\n\n\u002F\u002F Shadow mask.\nvec3 Mask(vec2 pos){\n pos.x+=pos.y*3.0;\n\n vec3 mask=vec3(maskDark,maskDark,maskDark);\n pos.x=fract(pos.x\u002F6.0);\n\n\n\n if(pos.x\u003C0.333333333)mask.r=maskLight;\n else if(pos.x\u003C0.66666666)mask.g=maskLight;\n else mask.b=maskLight;\n return mask;} \n\n\u002F\u002F Draw dividing bars.\nfloat Bar(float pos,float bar){pos-=bar;return pos*pos\u003C4.0?0.0:1.0;}\n\n\u002F\u002F Entry.\nvoid main(){\n vec2 fragCoord = vUv * iResolution;\n\n vec2 pos=Warp(fragCoord.xy\u002FiResolution.xy+vec2(0.0,0.0));\n\n gl_FragColor.rgb=Tri(pos)*Mask(fragCoord.xy);\n\n gl_FragColor.a=1.0; \n gl_FragColor.rgb=ToSrgb(gl_FragColor.rgb);\n\n }\n\n \n\n\n `,\n}\n","id":"mod_2eTjsJLajADpzjPRoD14Hb","is_binary":false,"title":"CrtShader.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ByuxHTiBj-Y","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"SJMBpiBsWt"},{"code":"export const DitherShader = {\n uniforms: {\n tDiffuse: { type: \"t\", value: null },\n tDither: { type: \"t\", value: null },\n tPalette: { type: \"t\", value: null },\n\n u_bit_depth: { type: \"i\", value: 32 },\n u_contrast: { value: 1 },\n u_offset: { value: 0 },\n u_dither_size: { type: \"i\", value: 2 },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n \u002F* \n This shader is under MIT license. Feel free to use, improve and \n change this shader according to your needs and consider sharing \n the modified result to godotshaders.com.\n *\u002F\n\n uniform sampler2D tDiffuse;\n\t\tvarying vec2 vUv;\n\n uniform sampler2D tDither;\n uniform sampler2D tPalette;\n\n uniform int u_bit_depth;\n uniform float u_contrast;\n uniform float u_offset;\n uniform int u_dither_size;\n\n void main() \n {\n \u002F\u002F sample the screen texture at the desired output resolution (according to u_dither_size)\n \u002F\u002F this will effectively pixelate the resulting output\n vec2 screen_size = vec2(textureSize(tDiffuse, 0)) \u002F float(u_dither_size);\n vec2 screen_sample_uv = floor(vUv * screen_size) \u002F screen_size;\n vec3 screen_col = texture(tDiffuse, screen_sample_uv).rgb;\n \n \u002F\u002F calculate pixel luminosity (https:\u002F\u002Fstackoverflow.com\u002Fquestions\u002F596216\u002Fformula-to-determine-brightness-of-rgb-color)\n float lum = (screen_col.r * 0.299) + (screen_col.g * 0.587) + (screen_col.b * 0.114);\n \n \u002F\u002F adjust with contrast and offset parameters\n float contrast = u_contrast;\n lum = (lum - 0.5 + u_offset) * contrast + 0.5;\n lum = clamp(lum, 0.0, 1.0);\n \n \u002F\u002F reduce luminosity bit depth to give a more banded visual if desired\t\n float bits = float(u_bit_depth);\n lum = floor(lum * bits) \u002F bits;\n \n \u002F\u002F to support multicolour palettes, we want to dither between the two colours on the palette\n \u002F\u002F which are adjacent to the current pixel luminosity.\n \u002F\u002F to do this, we need to determine which 'band' lum falls into, calculate the upper and lower\n \u002F\u002F bound of that band, then later we will use the dither texture to pick either the upper or \n \u002F\u002F lower colour.\n \n \u002F\u002F get the palette texture size mapped so it is 1px high (so the x value however many colour bands there are)\n ivec2 col_size = textureSize(tPalette, 0);\n ivec2 test = col_size;\n col_size \u002F= col_size.y;\n \n float col_x = float(col_size.x) - 1.0; \u002F\u002F colour boundaries is 1 less than the number of colour bands\n float col_texel_size = 1.0 \u002F col_x; \u002F\u002F the size of one colour boundary\n \n lum = max(lum - 0.00001, 0.0); \u002F\u002F makes sure our floor calculation below behaves when lum == 1.0\n float lum_lower = floor(lum * col_x) * col_texel_size;\n float lum_upper = (floor(lum * col_x) + 1.0) * col_texel_size;\n float lum_scaled = lum * col_x - floor(lum * col_x); \u002F\u002F calculates where lum lies between the upper and lower bound\n \n \u002F\u002F map the dither texture onto the screen. there are better ways of doing this that makes the dither pattern 'stick'\n \u002F\u002F with objects in the 3D world, instead of being mapped onto the screen. see lucas pope's details posts on how he \n \u002F\u002F achieved this in Obra Dinn: https:\u002F\u002Fforums.tigsource.com\u002Findex.php?topic=40832.msg1363742#msg1363742\n ivec2 noise_size = textureSize(tDither, 0);\n vec2 inv_noise_size = vec2(1.0 \u002F float(noise_size.x), 1.0 \u002F float(noise_size.y));\n vec2 noise_uv = vUv * inv_noise_size * vec2(float(screen_size.x), float(screen_size.y));\n float threshold = texture(tDither, noise_uv).r;\n \n \u002F\u002F adjust the dither slightly so min and max aren't quite at 0.0 and 1.0\n \u002F\u002F otherwise we wouldn't get fullly dark and fully light dither patterns at lum 0.0 and 1.0\n threshold = threshold * 0.99 + 0.005;\n \n \u002F\u002F the lower lum_scaled is, the fewer pixels will be below the dither threshold, and thus will use the lower bound colour,\n \u002F\u002F and vice-versa\n float ramp_val = lum_scaled \u003C threshold ? 0.0f : 1.0f;\n \u002F\u002F sample at the lower bound colour if ramp_val is 0.0, upper bound colour if 1.0\n float col_sample = mix(lum_lower, lum_upper, ramp_val);\n vec3 final_col = texture(tDiffuse, vec2(col_sample, 0.5)).rgb;\n \n \u002F\u002F return the final colour!\n gl_FragColor = vec4(final_col, 1.0);\n }\n `,\n}\n","id":"mod_8L5L7kixtWqsjP9njbeheK","is_binary":false,"title":"DitherShader.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rkKxSTjHjbt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"SJMBpiBsWt"},{"code":"export const NoiseShader = {\n uniforms: {\n tDiffuse: { type: \"t\", value: null },\n amount: { value: 0.08 },\n speed: { value: 0.5 },\n time: { value: 0 },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n uniform sampler2D tDiffuse;\n\t\tuniform float amount;\n\t\tuniform float speed;\n\t\tuniform float time;\n\n\t\tvarying vec2 vUv;\n\n\n\t\tfloat random(vec2 n, float offset ){\n\t\t\treturn .5 - fract(sin(dot(n.xy + vec2( offset, 0. ), vec2(12.9898, 78.233)))* 43758.5453);\n\t\t}\n\n\t\tvoid main() {\n\t\t\tvec3 color = texture2D(tDiffuse, vUv).rgb;\n\t\t\tcolor += vec3( amount * random( vUv, .0001 * speed * time ) );\n\t\t\tgl_FragColor = vec4(color,1.0);\n\t\t}\n `,\n}\n","id":"mod_32aTi8VREXRfZoz8arD57E","is_binary":false,"title":"NoiseShader.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"H1qgS6iSsbt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"SJMBpiBsWt"},{"code":"export const RGBShiftShader = {\n uniforms: {\n tDiffuse: { value: null },\n amount: { value: 0.02 },\n },\n\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n\t\tuniform sampler2D tDiffuse;\n\t\tuniform float amount;\n\n\t\tvarying vec2 vUv;\n\n\t\tvoid main() {\n\n\t\t\tvec2 offset = vec2( -0.1, 0.) * amount;\n\t\t\tvec4 cr = texture2D(tDiffuse, vUv + offset);\n\t\t\tvec4 cga = texture2D(tDiffuse, vUv);\n\t\t\tvec4 cb = texture2D(tDiffuse, vUv - offset);\n\t\t\tgl_FragColor = vec4(cr.r, cga.g, cb.b, cga.a);\n\t\t}\n `,\n}\n","id":"mod_QsKC2yLYTBTLHxWVpUyF47","is_binary":false,"title":"RGBShiftShader.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BJixrasBoWF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"SJMBpiBsWt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer2tile16.png","id":"mod_5tSqFi1DQAjeQowZ47PPEF","is_binary":true,"title":"bayer2tile16.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkcZSpjHoWF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_mono.png","id":"mod_FZc1B92cVr8QexmcEm9QY9","is_binary":true,"title":"palette_mono.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkoZH6sSsWK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_eeve.png","id":"mod_YV8fL4gqCNNZS92xGxzerG","is_binary":true,"title":"palette_eeve.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"Hyn-BajHjbt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer4tile8.png","id":"mod_SdSgyeebGHKMJgeutS3fsH","is_binary":true,"title":"bayer4tile8.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyTWHaoSi-F","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_hollow.png","id":"mod_MDs4jdaiREGRF2DwZdtxwU","is_binary":true,"title":"palette_hollow.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkC-rTiBsbK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"\u003C!DOCTYPE html\u003E\n\u003Chtml lang=\"en\"\u003E\n\n\u003Chead\u003E\n\t\u003Cmeta charset=\"utf-8\"\u003E\n\t\u003Cmeta name=\"viewport\" content=\"width=device-width, initial-scale=1, shrink-to-fit=no\"\u003E\n\t\u003Cmeta name=\"theme-color\" content=\"#000000\"\u003E\n\t\u003C!--\n manifest.json provides metadata used when your web app is added to the\n homescreen on Android. See https:\u002F\u002Fdevelopers.google.com\u002Fweb\u002Ffundamentals\u002Fengage-and-retain\u002Fweb-app-manifest\u002F\n --\u003E\n\t\u003Clink rel=\"manifest\" href=\"%PUBLIC_URL%\u002Fmanifest.json\"\u003E\n\t\u003Clink rel=\"shortcut icon\" href=\"%PUBLIC_URL%\u002Ffavicon.ico\"\u003E\n\t\u003C!--\n Notice the use of %PUBLIC_URL% in the tags above.\n It will be replaced with the URL of the `public` folder during the build.\n Only files inside the `public` folder can be referenced from the HTML.\n\n Unlike \"\u002Ffavicon.ico\" or \"favicon.ico\", \"%PUBLIC_URL%\u002Ffavicon.ico\" will\n work correctly both with client-side routing and a non-root public URL.\n Learn how to configure a non-root public URL by running `npm run build`.\n --\u003E\n\t\u003Ctitle\u003EReact App\u003C\u002Ftitle\u003E\n\u003C\u002Fhead\u003E\n\n\u003Cbody\u003E\n\t\u003Cnoscript\u003E\n\t\tYou need to enable JavaScript to run this app.\n\t\u003C\u002Fnoscript\u003E\n\t\u003Cdiv id=\"root\"\u003E\u003C\u002Fdiv\u003E\n\t\u003C!--\n This HTML file is a template.\n If you open it directly in the browser, you will see an empty page.\n\n You can add webfonts, meta tags, or analytics to this file.\n The build step will place the bundled scripts into the \u003Cbody\u003E tag.\n\n To begin the development, run `npm start` or `yarn start`.\n To create a production bundle, use `npm run build` or `yarn build`.\n --\u003E\n\u003C\u002Fbody\u003E\n\n\u003C\u002Fhtml\u003E","id":"mod_JqG5VbkQ8yXbm52qRUb6SN","is_binary":false,"title":"index.html","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SJ3rTiBsWY","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"S1rasroZY"},{"code":"{\n \"infiniteLoopProtection\": false,\n \"hardReloadOnChange\": false,\n \"view\": \"browser\"\n}\n","id":"mod_AjGb7xuQamevhWuGEp8pp4","is_binary":false,"title":"sandbox.config.json","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rJ6S6oHsbF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer16tile2.png","id":"mod_JbE6hYP5ftyDssqumWAuhe","is_binary":true,"title":"bayer16tile2.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rk_-rTsSiWt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer8tile4.png","id":"mod_2FCrB4NtW29xVZb2SR8J86","is_binary":true,"title":"bayer8tile4.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rkK-S6sHsWt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"import * as THREE from \"three\"\nimport React, { useRef, useMemo, useEffect, useState } from \"react\"\nimport SimplexNoise from \"simplex-noise\"\nimport { useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport bg from \".\u002Fresources\u002Fseamless8.png\"\nimport \".\u002Fmaterials\u002FShinyMaterial\"\nimport { Html } from \"@react-three\u002Fdrei\"\n\nvar RIBBON_LEN = 100 \u002F\u002Fnumber of spine point\n\nfunction createRibbonGeom() {\n \u002F\u002Fmake geometry, faces & colors for a ribbon\n var i\n\n const positions = []\n const normals = []\n const uvs = []\n const vertexColors = []\n\n \u002F\u002Fcreate verts + colors\n for (i = 0; i \u003C RIBBON_LEN; i++) {\n positions.push(new THREE.Vector3(i, i, 0))\n positions.push(new THREE.Vector3(i, i + 10, 0))\n vertexColors.push(new THREE.Color())\n vertexColors.push(new THREE.Color())\n }\n\n const indices = []\n\n \u002F\u002Fcreate faces\n for (i = 0; i \u003C RIBBON_LEN - 1; i++) {\n \u002F\u002F geom.faces.push(new THREE.Face3(i * 2, i * 2 + 1, i * 2 + 2))\n \u002F\u002F geom.faces.push(new THREE.Face3(i * 2 + 1, i * 2 + 3, i * 2 + 2))\n\n indices.push(i * 2)\n indices.push(i * 2 + 1)\n indices.push(i * 2 + 2)\n\n indices.push(i * 2 + 1)\n indices.push(i * 2 + 3)\n indices.push(i * 2 + 2)\n }\n\n return { positions, indices }\n}\n\nfunction lerp(v0, v1, t) {\n return v0 * (1 - t) + v1 * t\n}\n\nconst noise = new SimplexNoise()\n\nfunction getNoiseAngle(time, noiseId, zOffset) {\n \u002F\u002F return noise.noise3d( noiseTime, noiseId, zOffset ) * Math.PI*2;\n \u002F\u002F console.log(time, noiseId, zOffset)\n return noise.noise3D(time, noiseId, zOffset) * Math.PI * 2\n \u002F\u002F return Math.PI * 2 * (Math.sin(time) + 1)\n}\n\nexport default function Ribbon({ id = 1, color = \"red\" }) {\n const material = useRef()\n const mesh = useRef()\n const [texture] = useLoader(THREE.TextureLoader, [bg])\n texture.wrapS = texture.wrapT = THREE.RepeatWrapping\n\n const up = new THREE.Vector3(1, 0, 0)\n const direction = useRef(new THREE.Vector3(0, 0, 0))\n const normal = useRef(new THREE.Vector3(0, 0, 0))\n\n const arm1 = useRef(new THREE.Vector3())\n const arm2 = useRef(new THREE.Vector3())\n const arm3 = useRef(new THREE.Vector3())\n\n const arm1T = useRef(new THREE.Vector3())\n const arm2T = useRef(new THREE.Vector3())\n const arm3T = useRef(new THREE.Vector3())\n\n const xAxis = new THREE.Vector3(1, 0, 0)\n const yAxis = new THREE.Vector3(0, 1, 0)\n const zAxis = new THREE.Vector3(0, 0, 1)\n\n const noiseId = useRef(-1)\n const ribbonWidth = useRef(-1)\n\n const head = useRef(new THREE.Vector3(0, 0, 0))\n const prev = useRef(new THREE.Vector3(0, 0, 0))\n\n const geometry = useRef()\n\n const [debug, setDebug] = useState(\"\")\n\n const onInit = () =\u003E {\n noiseId.current = id \u002F 300\n\n \u002F\u002F generate thiccness\n ribbonWidth.current = Math.random() * 1.5 + 0.25\n if (Math.random() \u003C 0.2) {\n ribbonWidth.current = 4\n }\n\n \u002F\u002Fhead is the thing that moves, prev follows behind\n head.current = new THREE.Vector3(0, 0, 0)\n prev.current = new THREE.Vector3(0, 0, 0)\n\n \u002F\u002F construct mesh\n const geo = createRibbonGeom()\n geometry.current.setFromPoints(geo.positions)\n geometry.current.setIndex(geo.indices)\n geometry.current.computeVertexNormals()\n\n \u002F\u002Fmovement arm vectors\n var armLenFac = 1.7\n arm1.current = new THREE.Vector3(8 * armLenFac, 0, 0)\n arm2.current = new THREE.Vector3(4 * armLenFac, 0, 0)\n arm3.current = new THREE.Vector3(1.5 * armLenFac, 0, 0)\n\n onReset()\n }\n\n const onReset = () =\u003E {\n var i\n\n \u002F\u002Freset prev position\n prev.current.copy(head.current)\n\n \u002F\u002Freset mesh geom\n for (i = 0; i \u003C RIBBON_LEN; i++) {\n geometry.current.attributes.position.array[i * 6] = head.current.x\n geometry.current.attributes.position.array[i * 6 + 1] = head.current.y\n geometry.current.attributes.position.array[i * 6 + 2] = head.current.z\n geometry.current.attributes.position.array[i * 6 + 3] = head.current.x\n geometry.current.attributes.position.array[i * 6 + 4] = head.current.y\n geometry.current.attributes.position.array[i * 6 + 5] = head.current.z\n }\n\n \u002F\u002Finit colors for this ribbon\n \u002F\u002Fhue is set by noiseId\n let hue1 = (noiseId + Math.random() * 0.01) % 2\n let hue2 = (noiseId + Math.random() * 0.01) % 2\n\n if (Math.random() \u003C 0.1) {\n hue1 = Math.random()\n }\n if (Math.random() \u003C 0.1) {\n hue2 = Math.random()\n }\n\n var sat = Math.random() * 0.4 + 0.6\n var lightness = Math.random() * 0.4 + 0.2\n\n var col = new THREE.Color()\n\n \u002F\u002F TODO: fix this so the ribbons change colour along their length\n for (i = 0; i \u003C RIBBON_LEN - 1; i++) {\n \u002F\u002Fadd lightness gradient based on spine position\n col.setHSL(lerp(hue1, hue2, i \u002F RIBBON_LEN), sat, lightness)\n\n \u002F\u002F this.meshGeom.faces[i*2].color.copy(col);\n \u002F\u002F this.meshGeom.faces[i*2+1].color.copy(col);\n }\n\n geometry.current.verticesNeedUpdate = true\n geometry.current.colorsNeedUpdate = true\n }\n\n useFrame(({ clock }, delta) =\u003E {\n \u002F\u002F material.current.time += delta + Math.sin(clock.elapsedTime \u002F 10) \u002F 1000\n mesh.current.rotation.x = Math.sin(clock.elapsedTime) * (Math.PI \u002F 20)\n mesh.current.rotation.y = Math.cos(clock.elapsedTime) * (Math.PI \u002F 20)\n mesh.current.scale.y = 0.9 + 0.1 * Math.sin(clock.elapsedTime)\n mesh.current.scale.x = 0.9 + 0.1 * Math.cos(clock.elapsedTime)\n\n prev.current.copy(head.current)\n\n \u002F\u002Fmove arms\n arm1T.current.copy(arm1.current)\n arm2T.current.copy(arm2.current)\n arm3T.current.copy(arm3.current)\n\n const t = clock.elapsedTime \u002F 5.0\n\n arm1T.current.applyAxisAngle(zAxis, getNoiseAngle(t, noiseId.current, 0))\n arm1T.current.applyAxisAngle(yAxis, getNoiseAngle(t, noiseId.current, 20))\n\n arm2T.current.applyAxisAngle(zAxis, getNoiseAngle(t, noiseId.current, 50))\n arm2T.current.applyAxisAngle(xAxis, getNoiseAngle(t, noiseId.current, 70))\n\n arm3T.current.applyAxisAngle(xAxis, getNoiseAngle(t, noiseId.current, 100))\n arm3T.current.applyAxisAngle(yAxis, getNoiseAngle(t, noiseId.current, 150))\n\n \u002F\u002FMOVE HEAD\n head.current.copy(arm1T.current).add(arm2T.current).add(arm3T.current)\n\n \u002F\u002Fcalc new L + R edge positions from direction between head and prev\n direction.current.subVectors(head.current, prev.current).normalize()\n normal.current.crossVectors(direction.current, up).normalize()\n normal.current.multiplyScalar(ribbonWidth.current)\n\n \u002F\u002Fshift each 2 verts down one posn\n \u002F\u002Fe.g. copy verts (0,1) -\u003E (2,3)\n const verts = geometry.current.attributes.position.array\n for (var i = RIBBON_LEN - 1; i \u003E 0; i--) {\n verts[i * 6] = verts[(i - 1) * 6]\n verts[i * 6 + 1] = verts[(i - 1) * 6 + 1]\n verts[i * 6 + 2] = verts[(i - 1) * 6 + 2]\n verts[i * 6 + 3] = verts[(i - 1) * 6 + 3]\n verts[i * 6 + 4] = verts[(i - 1) * 6 + 4]\n verts[i * 6 + 5] = verts[(i - 1) * 6 + 5]\n }\n\n \u002F\u002Fpopulate 1st 2 verts with left and right normalHelper\n const v0 = new THREE.Vector3().copy(head.current).add(normal.current)\n verts[0] = v0.x\n verts[1] = v0.y\n verts[2] = v0.z\n const v1 = new THREE.Vector3().copy(head.current).sub(normal.current)\n verts[3] = v1.x\n verts[4] = v1.y\n verts[5] = v1.z\n\n geometry.current.attributes.position.needsUpdate = true\n geometry.current.computeFaceNormals()\n geometry.current.computeVertexNormals()\n\n \u002F\u002F setDebug(`${head.current.x}, ${head.current.y}, ${head.current.z}`)\n })\n\n useEffect(() =\u003E {\n onInit()\n }, [onInit])\n\n return (\n \u003C\u003E\n \u003Cmesh position={[0, 0, 0]} ref={mesh}\u003E\n {\u002F* \u003CshinyMaterial ref={material} attach=\"material\" noiseTexture={texture} \u002F\u003E *\u002F}\n \u003CmeshStandardMaterial attach=\"material\" color={color} side={THREE.DoubleSide} \u002F\u003E\n \u003CbufferGeometry attach=\"geometry\" ref={geometry} \u002F\u003E\n {\u002F* \u003CmeshPhongMaterial\n attach=\"material\"\n side={THREE.DoubleSide}\n vertexColors={THREE.FaceColors}\n color={\"#FFFFFF\"}\n shininess={30}\n specular={0x50473b}\n frustumCulled={false}\n \u002F\u003E *\u002F}\n \u003C\u002Fmesh\u003E\n {\u002F* \u003CHtml\u003E\n \u003Cp\u003E{debug}\u003C\u002Fp\u003E\n \u003C\u002FHtml\u003E *\u002F}\n \u003C\u002F\u003E\n )\n}\n","id":"mod_XfjyiuMTBzDf5oQPb2Ai8T","is_binary":false,"title":"Ribbon.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyfxSTiSo-K","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"export const DIMENSIONS = {\n width: window.innerWidth,\n height: window.innerHeight,\n}\n","id":"mod_X95oaYketahUANTpXezvos","is_binary":false,"title":"const.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ByNlB6orobF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"export const SpaceShader = {\n uniforms: {\n tDiffuse: { value: null },\n time: { value: 0 },\n opacity: { value: 0.15 },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n uniform sampler2D tDiffuse;\n\t\tuniform float time;\n\t\tuniform float opacity;\n\n\t\tvarying vec2 vUv;\n\n\t\t#define PI 3.14159265359\n\n\t\tmat2 rotate2d(float _angle){\n\t\t\treturn mat2(cos(_angle),-sin(_angle),\n\t\t\t\tsin(_angle),cos(_angle));\n\t\t}\n\n\t\tfloat hash( vec2 p )\n\t\t{\n\t\t\tfloat h = dot(p,vec2(127.1,311.7));\n\t\t\treturn -1.0 + 2.0*fract(sin(h)*43758.5453123);\n\t\t}\n\n\t\t\u002F\u002Fcheap value noise https:\u002F\u002Fwww.shadertoy.com\u002Fview\u002Flsf3WH\n\t\tfloat noise( in vec2 p )\n\t\t{\n\t\t\tvec2 i = floor( p );\n\t\t\tvec2 f = fract( p );\n\n\t\t\tvec2 u = f*f*(3.0-2.0*f);\n\n\t\t\treturn mix( mix( hash( i + vec2(0.0,0.0) ),\n\t\t\t hash( i + vec2(1.0,0.0) ), u.x),\n\t\t\tmix( hash( i + vec2(0.0,1.0) ),\n\t\t\t hash( i + vec2(1.0,1.0) ), u.x), u.y);\n\t\t}\n\n\t\tvoid main( )\n\t\t{\n\n\t\t\tvec2 uv = vUv;\n\t\t\tvec3 orig = texture2D(tDiffuse, uv).rgb;\n\n\t\t\t\u002F\u002Frotate uv space\n\t\t\tuv -= 0.5;\n\t\t\tuv = rotate2d( fract(time\u002F40.0)*2.0*PI ) * uv;\n\t\t\tuv += 0.5;\n\n\t\t\t\u002F\u002F4 corner grad blue\u002Fpink\n\t\t\tvec3 col1 = vec3(uv,1.0);\n\n\t\t\t\u002F\u002F4 corner grad pink\u002Forang\n\t\t\tvec3 col2 = vec3(1,uv);\n\n\t\t\t\u002F\u002Fmix over time\n\t\t\tfloat t = abs(2.0 * fract(time\u002F10.0) - 1.0);\n\t\t\tvec3 col = mix(col1,col2,t);\n\n\t\t\t\u002F\u002Fnoise clouds additive blend\n\t\t\t\u002F\u002Ff returns in -1 - 1 range'\n\t\t\t\u002F\u002Fsimple 1 octave\n\t\t\tfloat tn = time\u002F5.0;\n\t\t\tvec2 uvn = uv * 2.; \u002F\u002F noise scale\n\t\t\tfloat f = noise( uvn + tn);\n\t\t\tf *= 0.8;\n\t\t\tcol += f;\n\t\t\t\u002F\u002Fdarker\n\t\t\tcol -= 0.4;\n\n\t\t\tgl_FragColor = vec4(orig + col * opacity, 1.0);\n\t\t}\n `,\n}\n","id":"mod_QwS4FARkzCyAnrjHMUumKP","is_binary":false,"title":"SpaceShader.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HkneBTjSibt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"SJMBpiBsWt"},{"code":"export const TiltShiftShader = {\n uniforms: {\n tDiffuse: { type: \"t\", value: null },\n focusPos: { type: \"f\", value: 0.5 }, \u002F\u002F focus Y position 0 = bottom, 1 = top\n range: { type: \"f\", value: 0.5 }, \u002F\u002F distance to fade between none and full blur\n offset: { type: \"f\", value: 0.02 }, \u002F\u002F blur distance\n strength: { type: \"f\", value: 0.9 }, \u002F\u002F 0 = passthru, 1 = full\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n uniform sampler2D tDiffuse;\n\t\tuniform float focusPos;\n\t\tuniform float offset;\n\t\tuniform float range;\n\t\tuniform float strength;\n\n\t\tvarying vec2 vUv;\n\n\t\tvoid main() {\n\n\t\t\tvec4 orig = texture2D(tDiffuse, vUv);\n\n\t\t\tvec2 uv = vUv;\n\n\t\t\tfloat tiltAmt = 1.0 - smoothstep(focusPos - range,focusPos,uv.y) + smoothstep(focusPos,focusPos + range,uv.y);\n\n\t\t\t\u002F\u002Fblur image\n\t\t\tvec4 blurImg = vec4( 0.0 );\n\n\t\t\tfloat offset9 = offset * 0.9;\n\t\t\tfloat offset7 = offset * 0.7;\n\t\t\tfloat offset4 = offset * 0.4;\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, 0.4 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, 0.37 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, 0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, 0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.40, 0.0 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, -0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, -0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, -0.37 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, -0.4 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, 0.37 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, 0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, 0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.4, 0.0 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, -0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, -0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, -0.37 ) * offset );\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, 0.37 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, 0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, -0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, -0.37 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, 0.37 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, 0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, -0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, -0.37 ) * offset9 );\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, 0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.40, 0.0 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, -0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, -0.4 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, 0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.4, 0.0 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, -0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, 0.4 ) * offset7 );\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, 0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.4, 0.0 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, -0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, -0.4 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, 0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.4, 0.0 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, -0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, 0.4 ) * offset4 );\n\n\t\t\tblurImg = blurImg \u002F 41.0;\n\n\t\t\tgl_FragColor = mix(orig, blurImg, tiltAmt * strength);\n\n\t\t}\n `,\n}\n","id":"mod_StoDJTwcpJkbfKk82edRvg","is_binary":false,"title":"TiltShiftShader.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"H1peBajBjZt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"SJMBpiBsWt"},{"code":"html,\nbody,\n#root {\n margin: 0;\n padding: 0;\n width: 100%;\n height: 100%;\n}\n","id":"mod_WqvMGQdBZPSZP5E4kZ4Kcg","is_binary":false,"title":"styles.css","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkClSajSjZY","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"\u002F*\n * Easing Functions - inspired from http:\u002F\u002Fgizma.com\u002Feasing\u002F\n * only considering the t value for the range [0, 1] =\u003E [0, 1]\n *\u002F\nconst EasingFunctions = {\n \u002F\u002F no easing, no acceleration\n linear: (t: number) =\u003E t,\n \u002F\u002F accelerating from zero velocity\n easeInQuad: (t: number) =\u003E t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutQuad: (t: number) =\u003E t * (2 - t),\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutQuad: (t: number) =\u003E (t \u003C 0.5 ? 2 * t * t : -1 + (4 - 2 * t) * t),\n \u002F\u002F accelerating from zero velocity\n easeInCubic: (t: number) =\u003E t * t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutCubic: (t: number) =\u003E --t * t * t + 1,\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutCubic: (t: number) =\u003E (t \u003C 0.5 ? 4 * t * t * t : (t - 1) * (2 * t - 2) * (2 * t - 2) + 1),\n \u002F\u002F accelerating from zero velocity\n easeInQuart: (t: number) =\u003E t * t * t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutQuart: (t: number) =\u003E 1 - --t * t * t * t,\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutQuart: (t: number) =\u003E (t \u003C 0.5 ? 8 * t * t * t * t : 1 - 8 * --t * t * t * t),\n \u002F\u002F accelerating from zero velocity\n easeInQuint: (t: number) =\u003E t * t * t * t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutQuint: (t: number) =\u003E 1 + --t * t * t * t * t,\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutQuint: (t: number) =\u003E (t \u003C 0.5 ? 16 * t * t * t * t * t : 1 + 16 * --t * t * t * t * t),\n}\n\ntype E = typeof EasingFunctions\nexport type EaseFn = keyof E\n\nexport default EasingFunctions\n","id":"mod_3FhAQcr9Ejeqrd9HV3JPRL","is_binary":false,"title":"ease.ts","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HJybrpiroWY","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"H17r6oBjbF"},{"code":"import { useFrame, useThree } from \"@react-three\u002Ffiber\"\nimport { useMemo, useRef } from \"react\"\nimport { Observable, of, Subject } from \"rxjs\"\nimport { expand, filter, map, share } from \"rxjs\u002Foperators\"\n\n\u002F\u002F Yoinked from https:\u002F\u002Fwww.learnrxjs.io\u002Flearn-rxjs\u002Frecipes\u002Fgameloop\n\nexport interface IFrameData {\n frameStartTime: number\n deltaTime: number\n}\n\n\u002F**\n * clampTo30FPS(frame)\n *\n * @param frame - {IFrameData} the frame data to check if we need to clamp to max of\n * 30fps time.\n *\n * If we get sporadic LONG frames (browser was navigated away or some other reason the frame takes a while) we want to throttle that so we don't JUMP ahead in any deltaTime calculations too far.\n *\u002F\nexport const clampTo30FPS = (frame: IFrameData) =\u003E {\n if (frame.deltaTime \u003E 1 \u002F 30) {\n frame.deltaTime = 1 \u002F 30\n }\n return frame\n}\n\nexport const clampTo60FPS = (frame: IFrameData) =\u003E {\n if (frame.deltaTime \u003E 1 \u002F 60) {\n frame.deltaTime = 1 \u002F 60\n }\n return frame\n}\n\n\u002F**\n * This function returns an observable that will emit the next frame once the\n * browser has returned an animation frame step. Given the previous frame it calculates\n * the delta time, and we also clamp it to 30FPS in case we get long frames.\n *\u002F\nconst calculateStep: (prevFrame?: IFrameData) =\u003E Observable\u003CIFrameData\u003E = (prevFrame?: IFrameData) =\u003E {\n return new Observable((observer) =\u003E {\n requestAnimationFrame((frameStartTime) =\u003E {\n console.log(\"raf\")\n\n \u002F\u002F Millis to seconds\n const deltaTime = prevFrame ? (frameStartTime - prevFrame.frameStartTime) \u002F 1000 : 0\n observer.next({\n frameStartTime,\n deltaTime,\n })\n })\n })\n}\n\n\u002F\u002F This is our core stream of frames. We use expand to recursively call the\n\u002F\u002F `calculateStep` function above that will give us each new Frame based on the\n\u002F\u002F window.requestAnimationFrame calls. Expand emits the value of the called functions\n\u002F\u002F returned observable, as well as recursively calling the function with that same\n\u002F\u002F emitted value. This works perfectly for calculating our frame steps because each step\n\u002F\u002F needs to know the lastStepFrameTime to calculate the next. We also only want to request\n\u002F\u002F a new frame once the currently requested frame has returned.\nexport const frames$ = of(undefined).pipe(\n expand((val) =\u003E calculateStep(val)),\n \u002F\u002F Expand emits the first value provided to it, and in this\n \u002F\u002F case we just want to ignore the undefined input frame\n filter((frame) =\u003E frame !== undefined),\n map((frame: IFrameData) =\u003E frame.deltaTime),\n share(),\n)\n\nexport const useFrameStream = () =\u003E {\n const s = useRef\u003CSubject\u003Cnumber\u003E\u003E(new Subject\u003Cnumber\u003E())\n useFrame(({ clock }) =\u003E {\n s.current.next(clock.getDelta())\n })\n return s\n}\n","id":"mod_EVL2q4hrc5uEKmHJL1P3bT","is_binary":false,"title":"frameStream.ts","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SJxbSTsSjZF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"H17r6oBjbF"},{"code":"import { EaseFn, default as easing } from \".\u002Fease\"\nimport { concat, merge, Observable, of, Subject } from \"rxjs\"\nimport { delay, flatMap, map, share, switchMap, take, takeUntil, takeWhile, tap } from \"rxjs\u002Foperators\"\nimport * as React from \"react\"\nimport { useFrameStream } from \".\u002FframeStream\"\nimport { Animatable, Interpolator, mapInterpolator, ObservableSource } from \".\u002FuseAnimation\"\n\nexport const useAnimation = (source: ObservableSource, interpolator: Interpolator, duration: number, sink: (v: Animatable) =\u003E void) =\u003E {\n const underlying = React.useRef(source.value())\n\n const frames$ = useFrameStream()\n\n React.useEffect(() =\u003E {\n sink(underlying.current)\n\n const sub = source.changes\n .pipe(\n switchMap((v) =\u003E {\n const baseTime = Date.now()\n\n return concat(\n frames$.current.pipe(\n share(),\n map((dt) =\u003E (Date.now() - baseTime) \u002F duration),\n takeWhile((t) =\u003E t \u003C 1),\n ),\n of(1),\n ).pipe(map(mapInterpolator(interpolator, underlying.current, v).sample))\n }),\n )\n .subscribe((v) =\u003E {\n underlying.current = v\n sink(v)\n })\n\n return () =\u003E {\n sub.unsubscribe()\n }\n }, [duration, source, sink, interpolator, frames$])\n}\n","id":"mod_LzVzM9se1Nt669JQoLgZp9","is_binary":false,"title":"three.ts","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rkbWrTjBs-K","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"H17r6oBjbF"},{"code":"import { EaseFn, default as easing } from \".\u002Fease\"\nimport { concat, merge, Observable, of, Subject } from \"rxjs\"\nimport { delay, flatMap, map, share, switchMap, take, takeUntil, takeWhile, tap } from \"rxjs\u002Foperators\"\nimport * as React from \"react\"\nimport { frames$, useFrameStream } from \".\u002FframeStream\"\n\nexport function interpolator(from: number, to: number, easeFn: EaseFn) {\n const e: (t: number) =\u003E number = easing[easeFn]\n return {\n end: to,\n sample: (t: number) =\u003E from + (to - from) * e(t),\n }\n}\n\nexport function mapInterpolator(i: Interpolator, from: number, to: number) {\n return {\n end: to,\n sample: (t: number) =\u003E from + (to - from) * i.sample(t),\n }\n}\n\nexport type Interpolator = ReturnType\u003Ctypeof interpolator\u003E\n\nexport function sequence(a: Interpolator, b: Interpolator) {\n const sample = (t: number) =\u003E {\n if (t \u003C 0.5) {\n return a.sample(t * 2)\n } else {\n return b.sample((t - 0.5) * 2)\n }\n }\n\n return {\n sample,\n end: b.end,\n }\n}\n\nfunction sequenceN(...i: Interpolator[]) {\n const n = i.length\n\n const sample = (t: number) =\u003E {\n const slice = 1 \u002F n\n const index = t % slice\n \u002F\u002F cut t into n segments\n \u002F\u002F pick correct interpolator based on t\n }\n}\n\nexport type Animatable = number\n\nexport type ObservableSource = {\n changes: Subject\u003CAnimatable\u003E\n value: () =\u003E Animatable\n swap: (swapFn: (v: Animatable) =\u003E Animatable) =\u003E void\n set: (v: Animatable) =\u003E void\n}\n\nexport const useObservable = (v: Animatable): ObservableSource =\u003E {\n const val = React.useRef(v)\n\n const subject = new Subject\u003CAnimatable\u003E()\n\n return {\n changes: subject,\n value: () =\u003E val.current,\n swap: (swapFn: (v: Animatable) =\u003E Animatable) =\u003E {\n val.current = swapFn(val.current)\n subject.next(val.current)\n },\n set: (v: Animatable) =\u003E {\n val.current = v\n subject.next(v)\n },\n }\n}\n\nexport const useObservableState = (v: Animatable): ObservableSource =\u003E {\n const [val, setVal] = React.useState(v)\n\n const subject = React.useRef(new Subject\u003CAnimatable\u003E())\n\n return React.useMemo(\n () =\u003E ({\n changes: subject.current,\n value: () =\u003E val,\n swap: (swapFn: (v: Animatable) =\u003E Animatable) =\u003E {\n subject.current.next(swapFn(val))\n setVal(swapFn)\n },\n set: (v: Animatable) =\u003E {\n subject.current.next(v)\n setVal(v)\n },\n }),\n [val, setVal],\n )\n}\n\n\u002F\u002F frames$.subscribe(console.log)\n\nexport const useAnimation = (source: ObservableSource, interpolator: Interpolator, duration: number, sink: (v: Animatable) =\u003E void) =\u003E {\n const underlying = React.useRef(source.value())\n\n React.useEffect(() =\u003E {\n sink(underlying.current)\n\n const sub = source.changes\n .pipe(\n switchMap((v) =\u003E {\n const baseTime = Date.now()\n\n return concat(\n frames$.pipe(\n share(),\n map((dt) =\u003E (Date.now() - baseTime) \u002F duration),\n takeWhile((t) =\u003E t \u003C 1),\n ),\n of(1),\n ).pipe(map(mapInterpolator(interpolator, underlying.current, v).sample))\n }),\n )\n .subscribe((v) =\u003E {\n underlying.current = v\n sink(v)\n })\n\n return () =\u003E {\n sub.unsubscribe()\n }\n }, [duration, source, sink, interpolator])\n}\n","id":"mod_UnyPoFzxDJY9eTnGbpgge9","is_binary":false,"title":"useAnimation.ts","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyMWHasSsZK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"H17r6oBjbF"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FfDYc-frog.png","id":"mod_XCxkD16LyP5moAFnDFoKAt","is_binary":true,"title":"frog.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-07T05:19:59","upload_id":null,"shortid":"HyDt7Z3Vt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"import * as THREE from \"three\"\nimport React, { useRef, useMemo, useEffect } from \"react\"\nimport { extend, useThree, useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport { EffectComposer } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FEffectComposer\"\nimport { ShaderPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FShaderPass\"\nimport { RenderPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FRenderPass\"\nimport { AfterimagePass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FAfterimagePass\"\nimport { GlitchPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FGlitchPass\"\nimport { UnrealBloomPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FUnrealBloomPass\"\nimport { CrtShader } from \".\u002Fshaders\u002FCrtShader\"\nimport { TiltShiftShader } from \".\u002Fshaders\u002FTiltShiftShader\"\nimport { NoiseShader } from \".\u002Fshaders\u002FNoiseShader\"\nimport { DitherShader } from \".\u002Fshaders\u002FDitherShader\"\nimport { SpaceShader } from \".\u002Fshaders\u002FSpaceShader\"\nimport { RGBShiftShader } from \".\u002Fshaders\u002FRGBShiftShader\"\n\nimport palette from \".\u002Fresources\u002Fpalette_twilight.png\"\nimport dither from \".\u002Fresources\u002Fbayer16tile2.png\"\n\nextend({\n EffectComposer,\n ShaderPass,\n RenderPass,\n UnrealBloomPass,\n AfterimagePass,\n GlitchPass,\n})\n\nexport default function Effects() {\n const composer = useRef()\n const { scene, gl, size, camera } = useThree()\n const aspect = useMemo(() =\u003E new THREE.Vector2(512, 512), [])\n const noisePass = useRef()\n const spacePass = useRef()\n const ditherPass = useRef()\n\n const [paletteTex, ditherTex] = useLoader(THREE.TextureLoader, [palette, dither])\n ditherTex.wrapS = ditherTex.wrapT = THREE.RepeatWrapping\n console.log(paletteTex.image.width, paletteTex.image.height)\n console.log(ditherTex.image.width, ditherTex.image.height)\n\n useEffect(() =\u003E void composer.current.setSize(size.width, size.height), [size])\n useFrame(({ clock }) =\u003E {\n \u002F\u002F noisePass.current.uniforms.time.value = clock.elapsedTime\n \u002F\u002F spacePass.current.uniforms.time.value = clock.elapsedTime\n \u002F\u002F ditherPass.current.uniforms.tDither.value = ditherTex\n \u002F\u002F ditherPass.current.uniforms.tPalette.value = paletteTex\n composer.current.render()\n }, 1)\n\n return (\n \u003CeffectComposer ref={composer} args={[gl]}\u003E\n \u003CrenderPass attachArray=\"passes\" scene={scene} camera={camera} \u002F\u003E\n {\u002F* \u003CafterimagePass attachArray=\"passes\" args={[0.98]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n\n {\u002F* \u003CglitchPass\n attachArray=\"passes\"\n args={[0.1]}\n scene={scene}\n camera={camera}\n \u002F\u003E *\u002F}\n\n {\u002F* \u003CshaderPass attachArray=\"passes\" args={[TiltShiftShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass ref={noisePass} attachArray=\"passes\" args={[NoiseShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass ref={spacePass} attachArray=\"passes\" args={[SpaceShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass attachArray=\"passes\" args={[RGBShiftShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CunrealBloomPass attachArray=\"passes\" args={[aspect, 0.4, 0.5, 0]} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass ref={ditherPass} attachArray=\"passes\" args={[DitherShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n\n {\u002F* \u003CshaderPass attachArray=\"passes\" args={[CrtShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n \u003C\u002FeffectComposer\u003E\n )\n}\n\n\u002F\u002F const blendPass = new THREE.ShaderPass(THREE.BlendShader, \"tDiffuse1\");\n\u002F\u002F blendPass.uniforms[\"tDiffuse2\"].value = savePass.renderTarget.texture;\n\u002F\u002F blendPass.uniforms[\"mixRatio\"].value = 0.8;\n","id":"mod_83A5pkoGBvnBssxTbVzWfu","is_binary":false,"title":"Effects.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-07T05:00:17","upload_id":null,"shortid":"SkZgHairiWK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FmTMV-char.png","id":"mod_DomXBMmajL3UDgZDZD8NXt","is_binary":true,"title":"char.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-07T05:14:53","upload_id":null,"shortid":"rJrUMb2VK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002Feha5-thumbnail.png","id":"mod_Ea4rKBFdhybFS8byijKjUj","is_binary":true,"title":"thumbnail.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-07T05:59:04","upload_id":null,"shortid":"S1l23bnNK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"import { useFrame, useThree } from \"@react-three\u002Ffiber\"\nimport { useCallback, useEffect, useRef } from \"react\"\nimport { Vector3 } from \"three\"\nimport { DIMENSIONS } from \".\u002Fconst\"\n\nexport function useMousePositionZ(targetZ) {\n const { camera } = useThree()\n\n const mousePos = useRef(new Vector3(0, 0, 0))\n const vec = useRef(new Vector3(0, 0, 0))\n const projectedPos = useRef(new Vector3(0, 0, 0))\n\n useFrame(() =\u003E {\n vec.current.copy(mousePos.current)\n vec.current.unproject(camera)\n vec.current.sub(camera.position).normalize()\n\n var distance = (targetZ - camera.position.z) \u002F vec.current.z\n\n \u002F\u002F Note: we want to avoid new-ing here if we can\n projectedPos.current = new Vector3().copy(camera.position).add(vec.current.multiplyScalar(distance))\n })\n\n const onMouseMoved = useCallback(\n (event) =\u003E {\n mousePos.current.set((event.clientX \u002F DIMENSIONS.width) * 2 - 1, -(event.clientY \u002F DIMENSIONS.height) * 2 + 1, 0.5)\n },\n [mousePos],\n )\n\n useEffect(() =\u003E {\n document.addEventListener(\"mousemove\", onMouseMoved)\n\n return () =\u003E {\n document.removeEventListener(\"mousemove\", onMouseMoved)\n }\n }, [onMouseMoved])\n\n return { mouse: mousePos, projected: projectedPos }\n}\n\nexport function useMousePositionY(targetY) {\n const { camera } = useThree()\n\n const mousePos = useRef(new Vector3(0, 0, 0))\n const vec = useRef(new Vector3(0, 0, 0))\n const projectedPos = useRef(new Vector3(0, 0, 0))\n\n useFrame(() =\u003E {\n vec.current.copy(mousePos.current)\n vec.current.unproject(camera)\n vec.current.sub(camera.position).normalize()\n\n var distance = (targetY - camera.position.y) \u002F vec.current.y\n\n \u002F\u002F Note: we want to avoid new-ing here if we can\n projectedPos.current = new Vector3().copy(camera.position).add(vec.current.multiplyScalar(distance))\n })\n\n const onMouseMoved = useCallback(\n (event) =\u003E {\n mousePos.current.set((event.clientX \u002F DIMENSIONS.width) * 2 - 1, -(event.clientY \u002F DIMENSIONS.height) * 2 + 1, 0.5)\n },\n [mousePos],\n )\n\n useEffect(() =\u003E {\n document.addEventListener(\"mousemove\", onMouseMoved)\n\n return () =\u003E {\n document.removeEventListener(\"mousemove\", onMouseMoved)\n }\n }, [onMouseMoved])\n\n return { mouse: mousePos, projected: projectedPos }\n}\n","id":"mod_F24nrWAqhHz7FnsBWLsAde","is_binary":false,"title":"useMousePosition.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-08T00:52:58","upload_id":null,"shortid":"5MBN8","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"import React from \"react\"\nimport ReactDOM from \"react-dom\"\nimport App from \".\u002FApp\"\nimport \".\u002Fstyles.css\"\n\nReactDOM.render(\n \u003CReact.StrictMode\u003E\n \u003CApp \u002F\u003E\n \u003C\u002FReact.StrictMode\u003E,\n document.getElementById(\"root\"),\n)\n","id":"mod_jdBuMKEpBL7EKeCiomaGw","is_binary":false,"title":"index.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-08T01:14:20","upload_id":null,"shortid":"HyBeBajBibt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"{ \"frames\": {\n \"char 0.png\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 1.png\": {\n \"frame\": { \"x\": 16, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 2.png\": {\n \"frame\": { \"x\": 32, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 3.png\": {\n \"frame\": { \"x\": 48, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 4.png\": {\n \"frame\": { \"x\": 64, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 5.png\": {\n \"frame\": { \"x\": 80, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 6.png\": {\n \"frame\": { \"x\": 96, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 7.png\": {\n \"frame\": { \"x\": 112, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 8.png\": {\n \"frame\": { \"x\": 128, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"char-sheet.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 144, \"h\": 16 },\n \"scale\": \"1\",\n \"frameTags\": [\n ],\n \"layers\": [\n { \"name\": \"Layer\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"mod_W85n4bYE6CA2r3cyNhEEBy","is_binary":false,"title":"char.json","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"HycdPgHIt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"{ \"frames\": {\n \"frog 0.png\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"sourceSize\": { \"w\": 17, \"h\": 16 },\n \"duration\": 100\n },\n \"frog 1.png\": {\n \"frame\": { \"x\": 17, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"sourceSize\": { \"w\": 17, \"h\": 16 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"frog-sheet.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 34, \"h\": 16 },\n \"scale\": \"1\",\n \"frameTags\": [\n ],\n \"layers\": [\n { \"name\": \"Layer\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"mod_24xgDK6yCUhjuLdY6dMREZ","is_binary":false,"title":"frog.json","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"SJg9uwxrIK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002F2mBW-char-sheet.png","id":"mod_MH4JbfLmB97BsFGfLc1DLa","is_binary":true,"title":"char-sheet.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"rkbcdDgHIF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002F3ykL-frog-sheet.png","id":"mod_Ddabm35SQwGgYiHSkHcsCP","is_binary":true,"title":"frog-sheet.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"HJf9OvxHUt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"{\n \"compilerOptions\": {\n \"target\": \"es5\",\n \"lib\": [\n \"es2017\",\n \"dom\",\n \"dom.iterable\",\n \"esnext\"\n ],\n \"allowJs\": true,\n \"skipLibCheck\": true,\n \"esModuleInterop\": true,\n \"allowSyntheticDefaultImports\": true,\n \"strict\": true,\n \"forceConsistentCasingInFileNames\": true,\n \"noFallthroughCasesInSwitch\": true,\n \"module\": \"esnext\",\n \"moduleResolution\": \"node\",\n \"resolveJsonModule\": true,\n \"isolatedModules\": true,\n \"noEmit\": true,\n \"jsx\": \"react-jsx\"\n },\n \"include\": [\n \"src\"\n ]\n}","id":"mod_Mrk7B6W1miJzDGp2gvmU5p","is_binary":false,"title":"tsconfig.json","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T04:41:19","upload_id":null,"shortid":"H1PbrpsSj-K","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"{ \"frames\": {\n \"bomber 0.aseprite\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 1.aseprite\": {\n \"frame\": { \"x\": 40, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 2.aseprite\": {\n \"frame\": { \"x\": 80, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 3.aseprite\": {\n \"frame\": { \"x\": 120, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 4.aseprite\": {\n \"frame\": { \"x\": 160, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 5.aseprite\": {\n \"frame\": { \"x\": 200, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 6.aseprite\": {\n \"frame\": { \"x\": 240, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 7.aseprite\": {\n \"frame\": { \"x\": 280, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 8.aseprite\": {\n \"frame\": { \"x\": 320, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 9.aseprite\": {\n \"frame\": { \"x\": 360, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 10.aseprite\": {\n \"frame\": { \"x\": 400, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 11.aseprite\": {\n \"frame\": { \"x\": 440, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 12.aseprite\": {\n \"frame\": { \"x\": 480, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 13.aseprite\": {\n \"frame\": { \"x\": 520, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 14.aseprite\": {\n \"frame\": { \"x\": 560, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 15.aseprite\": {\n \"frame\": { \"x\": 600, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 16.aseprite\": {\n \"frame\": { \"x\": 640, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 17.aseprite\": {\n \"frame\": { \"x\": 680, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 18.aseprite\": {\n \"frame\": { \"x\": 720, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 19.aseprite\": {\n \"frame\": { \"x\": 760, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 20.aseprite\": {\n \"frame\": { \"x\": 800, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 21.aseprite\": {\n \"frame\": { \"x\": 840, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 22.aseprite\": {\n \"frame\": { \"x\": 880, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 23.aseprite\": {\n \"frame\": { \"x\": 920, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"bomber.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 960, \"h\": 40 },\n \"scale\": \"1\",\n \"frameTags\": [\n { \"name\": \"idle\", \"from\": 0, \"to\": 5, \"direction\": \"forward\" },\n { \"name\": \"boom\", \"from\": 6, \"to\": 23, \"direction\": \"forward\" }\n ],\n \"layers\": [\n { \"name\": \"Layer 1\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"mod_PnxXrtQ5bn4sh9ai3CtaTb","is_binary":false,"title":"bomber.json","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T04:51:20","upload_id":null,"shortid":"r1JLYWH8t","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FUfe1-bomber.png","id":"mod_5H5vz2wme5Q6hS2GPw8qbu","is_binary":true,"title":"bomber.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T04:51:20","upload_id":null,"shortid":"r1g18FWHUK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"{ \"frames\": {\n \"smiley 0.ase\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 1.ase\": {\n \"frame\": { \"x\": 36, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 2.ase\": {\n \"frame\": { \"x\": 72, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 3.ase\": {\n \"frame\": { \"x\": 108, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 4.ase\": {\n \"frame\": { \"x\": 144, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 5.ase\": {\n \"frame\": { \"x\": 180, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 6.ase\": {\n \"frame\": { \"x\": 216, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 7.ase\": {\n \"frame\": { \"x\": 252, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 8.ase\": {\n \"frame\": { \"x\": 288, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 9.ase\": {\n \"frame\": { \"x\": 324, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 10.ase\": {\n \"frame\": { \"x\": 360, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 11.ase\": {\n \"frame\": { \"x\": 396, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 12.ase\": {\n \"frame\": { \"x\": 432, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 13.ase\": {\n \"frame\": { \"x\": 468, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 14.ase\": {\n \"frame\": { \"x\": 504, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 15.ase\": {\n \"frame\": { \"x\": 540, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 16.ase\": {\n \"frame\": { \"x\": 576, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 17.ase\": {\n \"frame\": { \"x\": 612, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"smiley.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 648, \"h\": 36 },\n \"scale\": \"1\",\n \"frameTags\": [\n { \"name\": \"idle\", \"from\": 0, \"to\": 7, \"direction\": \"forward\" },\n { \"name\": \"wink\", \"from\": 8, \"to\": 17, \"direction\": \"forward\" }\n ],\n \"layers\": [\n { \"name\": \"Layer\", \"opacity\": 255, \"blendMode\": \"normal\" },\n { \"name\": \"Layer 1\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"mod_VGobM92RGCprMzSyiZeXdM","is_binary":false,"title":"smiley.json","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T05:48:31","upload_id":null,"shortid":"SkwnIzHLt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FfHMz-smiley.png","id":"mod_Ubcta8mxknMnSmgHs9wU4H","is_binary":true,"title":"smiley.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T05:48:31","upload_id":null,"shortid":"HkeDnUMrUt","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"G66pK"},{"code":"import { useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport { MutableRefObject, useEffect, useRef } from \"react\"\nimport * as THREE from \"three\"\n\ntype AsepriteFrame = {\n frame: {\n x: number\n y: number\n w: number\n h: number\n }\n rotated: boolean\n trimmed: boolean\n spriteSourceSize: {\n x: number\n y: number\n w: number\n h: number\n }\n sourceSize: {\n w: number\n h: number\n }\n duration: number\n}\n\ntype AsepriteLayer = {\n name: string\n opacity: number\n blendMode: string\n}\n\ntype AsepriteFrameTag = { name: string; from: number; to: number; direction: \"forward\" | \"backward\" }\n\ntype AsepriteJson = {\n frames: { [name: string]: AsepriteFrame }\n meta: {\n app: string\n version: string\n image: string\n format: string\n size: {\n w: number\n h: number\n }\n frameTags: AsepriteFrameTag[]\n layers: AsepriteLayer[]\n slices: unknown[]\n }\n}\n\nfunction frameList(json: AsepriteJson): AsepriteFrame[] {\n return Object.values(json.frames)\n}\n\nfunction getAnimationFrames(json: AsepriteJson, name: string): AsepriteFrame[] {\n const tag = json.meta.frameTags.find((t) =\u003E t.name === name)\n if (!tag) return []\n\n const allFrames = frameList(json)\n return allFrames.slice(tag.from, tag.to)\n}\n\nexport function useAseprite(src: string, json: AsepriteJson, currentAnimation: string | null = null) {\n const texture: THREE.Texture = useLoader(THREE.TextureLoader, src)\n\n \u002F\u002F We'll be animating these independently, clone the texture\n const tex = texture.clone()\n tex.wrapS = tex.wrapT = THREE.RepeatWrapping\n tex.minFilter = THREE.NearestFilter\n tex.magFilter = THREE.NearestFilter\n tex.needsUpdate = true\n\n const frames: MutableRefObject\u003CAsepriteFrame[]\u003E = useRef([])\n\n const w = json.meta.size.w\n const h = json.meta.size.h\n\n const t = useRef(0)\n const index = useRef(0)\n\n useEffect(() =\u003E {\n t.current = 0\n index.current = 0\n\n if (currentAnimation) {\n frames.current = getAnimationFrames(json, currentAnimation)\n } else {\n frames.current = frameList(json)\n }\n }, [currentAnimation, texture, json])\n\n useFrame((_, delta) =\u003E {\n t.current += delta * 1000\n const f = frames.current[index.current]\n if (!f) return\n\n tex.repeat.set(f.frame.w \u002F w, f.frame.h \u002F h)\n\n if (t.current \u003E= f.duration) {\n index.current += 1\n if (index.current \u003E= frames.current.length) {\n index.current = 0\n }\n\n t.current = 0\n\n tex.offset.x = f.frame.x \u002F w\n tex.offset.y = f.frame.h \u002F h\n }\n })\n\n return tex\n}\n","id":"mod_48FKLKVYoA7EuLr5VYEcKZ","is_binary":false,"title":"useAseprite.ts","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-10-26T05:47:00","upload_id":null,"shortid":"X29DA","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FS4sy-smiley.png","id":"mod_B9Nbi51ANRjivPjQYoRrGJ","is_binary":true,"title":"smiley.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-11-01T05:37:54","upload_id":null,"shortid":"S1cEaeTIF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"{\n \"name\": \"basic-textured-quad-nearest-neighbour\",\n \"version\": \"1.0.0\",\n \"description\": \"isometric 3d scene with billboard sprites\",\n \"keywords\": [],\n \"main\": \"src\u002Findex.js\",\n \"dependencies\": {\n \"@react-three\u002Fdrei\": \"7.12.6\",\n \"@react-three\u002Ffiber\": \"7.0.10\",\n \"leva\": \"0.9.14\",\n \"react\": \"17.0.2\",\n \"react-dom\": \"17.0.2\",\n \"react-scripts\": \"4.0.3\",\n \"react-spring\": \"9.2.6\",\n \"simplex-noise\": \"2.4.0\",\n \"three\": \"0.127.0\",\n \"use-control\": \"0.2.1\"\n },\n \"devDependencies\": {\n \"@types\u002Freact\": \"^17.0.19\",\n \"@types\u002Fthree\": \"^0.131.0\",\n \"typescript\": \"^4.3.5\"\n },\n \"scripts\": {\n \"start\": \"react-scripts start\",\n \"build\": \"react-scripts build\",\n \"test\": \"react-scripts test --env=jsdom\",\n \"eject\": \"react-scripts eject\"\n },\n \"browserslist\": [\n \"\u003E0.2%\",\n \"not dead\",\n \"not ie \u003C= 11\",\n \"not op_mini all\"\n ]\n}","id":"mod_2UpoM4c9iRjuvPv7FpRmmy","is_binary":false,"title":"package.json","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-11-05T04:24:13","upload_id":null,"shortid":"rkiHpsBo-t","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":null},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FZGwu-smiley-static.png","id":"mod_DgFXVG7GPJd6paU2cPDdgR","is_binary":true,"title":"smiley-static.png","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-11-04T05:29:10","upload_id":null,"shortid":"SJ0jJxbvF","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyHH6jSsbt"},{"code":"import { OrbitControls, OrthographicCamera } from \"@react-three\u002Fdrei\"\nimport { Canvas, useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport { useControls } from \"leva\"\nimport React, { Suspense, useRef } from \"react\"\nimport * as THREE from \"three\"\nimport bard from \".\u002Fresources\u002Fsmiley-static.png\"\n\nconst TexturedQuad = ({ src, scale, position }) =\u003E {\n const ref = useRef()\n\n const texture = useLoader(THREE.TextureLoader, src)\n texture.minFilter = THREE.NearestFilter\n texture.magFilter = THREE.NearestFilter\n\n const [{ wireframe, sides, filter }, set] = useControls(() =\u003E ({\n wireframe: false,\n sides: {\n value: THREE.DoubleSide,\n options: {\n \"front side\": THREE.FrontSide,\n \"back side\": THREE.BackSide,\n \"both sides\": THREE.DoubleSide,\n },\n },\n filter: {\n value: THREE.NearestFilter,\n options: {\n \"nearest-neighbour\": THREE.NearestFilter,\n bilinear: THREE.LinearFilter,\n },\n },\n }))\n\n useFrame(() =\u003E {\n texture.magFilter = filter\n texture.minFilter = filter\n texture.needsUpdate = true\n\n if (ref.current) {\n ref.current.rotation.y += 0.01\n }\n })\n\n return (\n \u003Cmesh ref={ref}\u003E\n \u003CplaneBufferGeometry attach=\"geometry\" args={[1, 1]} \u002F\u003E\n {wireframe ? (\n \u003CmeshBasicMaterial side={sides} attach=\"material\" wireframe map={null} transparent={false} \u002F\u003E\n ) : (\n \u003CmeshBasicMaterial side={sides} transparent={true} attach=\"material\" map={texture} \u002F\u003E\n )}\n \u003C\u002Fmesh\u003E\n )\n}\n\nfunction Room() {\n return (\n \u003C\u003E\n \u003CTexturedQuad scale={[1, 1, 1]} src={bard} \u002F\u003E\n \u003C\u002F\u003E\n )\n}\n\nexport default function App() {\n return (\n \u003CCanvas\u003E\n \u003Ccolor attach=\"background\" args={[\"black\"]} \u002F\u003E\n {\u002F* \u003CSky azimuth={1} inclination={0.1} distance={1000} \u002F\u003E *\u002F}\n \u003COrthographicCamera makeDefault position={[0, 0, 5]} zoom={400} \u002F\u003E\n \u003CambientLight intensity={0.1} \u002F\u003E\n \u003CpointLight position={[10, 10, 10]} \u002F\u003E\n \u003CSuspense fallback={null}\u003E\n \u003CRoom \u002F\u003E\n \u003C\u002FSuspense\u003E\n \u003COrbitControls minPolarAngle={Math.PI \u002F 10} maxPolarAngle={Math.PI \u002F 1.5} \u002F\u003E\n \u003C\u002FCanvas\u003E\n )\n}\n","id":"mod_Kwp4uNdZvs12oveHwHvHgD","is_binary":false,"title":"App.js","sha":null,"inserted_at":"2021-11-05T04:22:31","updated_at":"2021-12-06T05:14:04","upload_id":null,"shortid":"SJCBporsbK","source_id":"src_SoS4DpLFqVZmS5jGaoN6tn","directory_shortid":"HyxSToBs-F"}],"picks":[],"alias":"basic-textured-quad-nearest-neighbour-pkbjf","custom_template":null,"v2":false,"room_id":null,"owned":false,"authorization":"read","preview_secret":null,"external_resources":[],"free_plan_editing_restricted":false,"privacy":0,"restricted":false,"template":"create-react-app","forked_template_sandbox":{"alias":"shell-rxjs-useanimation-bomlk","id":"bomlk","title":"shell-rxjs-useanimation","template":"create-react-app","inserted_at":"2021-08-31T06:27:09","updated_at":"2021-09-17T07:41:00","git":{"path":"","branch":"main","repo":"use-animation","username":"bfollington","commit_sha":"67eb82be264ca13907e493617cd0d2c5106d51f4"},"privacy":0,"custom_template":{"id":"sbtempl_P225DqK436mneWY5GnwpZw","title":"shell-rxjs-useanimation","color":"#61DAFB","v2":false,"url":null,"published":false,"icon_url":"github","official":false}},"updated_at":"2021-12-06T05:14:04","forked_template":{"id":"sbtempl_P225DqK436mneWY5GnwpZw","title":"shell-rxjs-useanimation","color":"#61DAFB","v2":false,"url":null,"published":false,"icon_url":"github","official":false},"id":"pkbjf","is_frozen":false,"view_count":24734,"ai_consent":false,"base_git":null,"draft":false,"npm_dependencies":{},"screenshot_url":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002Feha5-thumbnail.png","forked_from_sandbox":{"alias":"basic-textured-quad-2161n","id":"2161n","title":"basic textured quad","template":"create-react-app","inserted_at":"2021-11-04T05:28:25","updated_at":"2021-11-05T01:57:05","git":null,"privacy":0,"custom_template":null},"original_git_commit_sha":"f84a1138106c8d05040d06b91d7be7802e344c9a"};