, `keyup
texture UV slicing animation - CodeSandbox, `keytexture UV slicing animation - CodeSandbox, `mousemovetexture UV slicing animation - CodeSandbox, `mousemovenormalisedtexture UV slicing animation - CodeSandbox and `mousemovedeltatexture UV slicing animation - CodeSandbox.\n\n```tsx\nconst angle = useObservable(0)\n \n\u002F\u002F ...\n\nuseEffect(() =\u003E {\n const s = combineLatest([\n mousemovenormalised$.pipe(\n sampleTime(100),\n map(([x]) =\u003E Math.PI * 2 * Math.sin(x * Math.PI)),\n ),\n interval(100),\n ])\n .pipe(map(([angle, time]) =\u003E angle + time \u002F 5.0))\n .subscribe(angle.set)\n return () =\u003E s.unsubscribe()\n}, [angle])\n```\n","id":"516c6d87-3a44-4949-bc12-69ae7bd9e643","is_binary":false,"title":"README.md","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkcH6sHiWK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fdither_matrix.png","id":"28d6dcbf-d572-4170-93e4-5cbe05e7fca2","is_binary":true,"title":"dither_matrix.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rJJMBpjHoZt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_rising_sun.png","id":"d7f23d7f-2ec0-467e-a2a6-89d34f0ec41f","is_binary":true,"title":"palette_rising_sun.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkxMSpsHs-Y","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_moonlight.png","id":"edbccf69-c120-4063-a8a1-9397f437b243","is_binary":true,"title":"palette_moonlight.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkbzS6oSobt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fblue_noise.png","id":"20cc5406-f7f8-4b83-be63-adf79422842d","is_binary":true,"title":"blue_noise.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HkMGHasrjZF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fblade_diffuse.jpg","id":"6ce72406-fcde-42ac-8e9f-99778a9712f0","is_binary":true,"title":"blade_diffuse.jpg","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ry7GH6iribK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless7.png","id":"e1063849-5a5d-4300-9921-834563c25361","is_binary":true,"title":"seamless7.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"B1VGHTjBoZt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_twilight.png","id":"69fb1e2a-bc39-45fc-afed-9674f65abac5","is_binary":true,"title":"palette_twilight.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"B1SGS6oriWF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fblade_alpha.jpg","id":"3f410742-70d8-4089-ba83-f33f3a9eea66","is_binary":true,"title":"blade_alpha.jpg","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BJIzBTjrs-Y","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fwater-0397b.jpeg","id":"6be3b485-95da-490d-a2d1-b34fa6b5be4d","is_binary":true,"title":"water-0397b.jpeg","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyPGB6jrs-t","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless6.png","id":"631bf3ef-77b1-48c5-89d0-a10ee2d8bbf3","is_binary":true,"title":"seamless6.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ByOzrpirjZt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless8.png","id":"d27b758c-0bd3-4737-aa80-d8c00fcff60d","is_binary":true,"title":"seamless8.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkFfH6jrsbt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fseamless5.png","id":"95393575-2faa-44e9-b111-5adc017ca542","is_binary":true,"title":"seamless5.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BJcGHasSj-Y","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"{\n \"extends\": \"react-app\",\n \"rules\": {\n \"semi\": [2, \"never\"],\n \"no-unexpected-multiline\": \"error\"\n }\n }","id":"08cba470-2aee-488c-bf42-9a78b1222867","is_binary":false,"title":".eslintrc","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"Bk8SaiSoWK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"node_modules\nyarn.lock\n","id":"a1a3af3c-527e-4577-894f-731617bf6f7e","is_binary":false,"title":".gitignore","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkDS6iBiWF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"{\n \"printWidth\": 160,\n \"tabWidth\": 2,\n \"useTabs\": false,\n \"semi\": false,\n \"singleQuote\": false,\n \"trailingComma\": \"all\",\n \"bracketSpacing\": true,\n \"jsxBracketSameLine\": true,\n \"fluid\": false\n}","id":"f7f81082-f093-4bce-9f30-9d3d25a84c16","is_binary":false,"title":".prettierrc","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"r1OHTjri-K","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"import * as THREE from \"three\"\nimport { shaderMaterial } from \"@react-three\u002Fdrei\"\nimport { extend } from \"@react-three\u002Ffiber\"\n\nconst ShinyMaterial = shaderMaterial(\n { baseSpeed: 0.05, noiseTexture: null, noiseScale: 0.5337, alpha: 1.0, time: 10.0 },\n `\n varying vec2 vUv;\n void main() {\n vUv = uv;\n gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n }\n `,\n `\n uniform sampler2D baseTexture;\n uniform float baseSpeed;\n uniform sampler2D noiseTexture;\n uniform float noiseScale;\n uniform float alpha;\n uniform float time;\n uniform vec2 resolution;\n\n varying vec2 vUv;\n void main()\n {\n vec2 uvTimeShift = vUv + vec2( -0.3, 1.5 ) * time * baseSpeed;\n vec4 noiseGeneratorTimeShift = texture2D( noiseTexture, uvTimeShift );\n vec2 uvNoiseTimeShift = vUv + noiseScale * vec2( noiseGeneratorTimeShift.r, noiseGeneratorTimeShift.b );\n vec4 baseColor = texture2D( baseTexture, uvNoiseTimeShift );\n\n baseColor.a = alpha;\n gl_FragColor = baseColor;\n }\n `,\n (self) =\u003E {\n self.side = THREE.DoubleSide\n },\n)\n\nextend({ ShinyMaterial })\n","id":"897bc63f-4d1c-458b-b15a-750ed52977da","is_binary":false,"title":"ShinyMaterial.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HJIlBpoBoWt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"ByZHpsHjbt"},{"code":"\u002F\u002F\u002F \u003Creference types=\"react-scripts\" \u002F\u003E\n","id":"35663e50-37ff-467e-aa6f-6820fb0fa9bb","is_binary":false,"title":"react-app-env.d.ts","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HkPlr6orsWK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"import { Mesh, OrthographicCamera, PlaneBufferGeometry, Scene, ShaderMaterial, UniformsUtils, Vector2 } from \"three\"\nimport { DIMENSIONS } from \"..\u002Fconst\"\n\nexport const CrtShader = {\n uniforms: {\n tDiffuse: { value: null },\n iResolution: {\n type: \"vec2\",\n value: new Vector2(DIMENSIONS.width - 3, DIMENSIONS.height - 2),\n },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n \u002F\u002F #include \u003Ccommon\u003E\n\t\tuniform sampler2D tDiffuse;\n\t\tvarying vec2 vUv;\n uniform vec2 iResolution;\n\n \u002F\u002F\n\u002F\u002F PUBLIC DOMAIN CRT STYLED SCAN-LINE SHADER\n\u002F\u002F\n\u002F\u002F by Timothy Lottes\n\u002F\u002F\n\u002F\u002F This is more along the style of a really good CGA arcade monitor.\n\u002F\u002F With RGB inputs instead of NTSC.\n\u002F\u002F The shadow mask example has the mask rotated 90 degrees for less chromatic aberration.\n\u002F\u002F\n\u002F\u002F Left it unoptimized to show the theory behind the algorithm.\n\u002F\u002F\n\u002F\u002F It is an example what I personally would want as a display option for pixel art games.\n\u002F\u002F Please take and use, change, or whatever.\n\u002F\u002F\n\n\u002F\u002F Emulated input resolution.\n#if 0\n \u002F\u002F Fix resolution to set amount.\n #define res (vec2(320.0\u002F1.0,160.0\u002F1.0))\n#else\n \u002F\u002F Optimize for resize.\n #define res (iResolution.xy\u002F6.0)\n#endif\n\n\u002F\u002F Hardness of scanline.\n\u002F\u002F -8.0 = soft\n\u002F\u002F -16.0 = medium\nfloat hardScan=-8.0;\n\n\u002F\u002F Hardness of pixels in scanline.\n\u002F\u002F -2.0 = soft\n\u002F\u002F -4.0 = hard\nfloat hardPix=-3.0;\n\n\u002F\u002F Display warp.\n\u002F\u002F 0.0 = none\n\u002F\u002F 1.0\u002F8.0 = extreme\nvec2 warp=vec2(1.0\u002F32.0,1.0\u002F24.0); \n\n\u002F\u002F Amount of shadow mask.\nfloat maskDark=0.5;\nfloat maskLight=1.5;\n\n\u002F\u002F------------------------------------------------------------------------\n\n\u002F\u002F sRGB to Linear.\n\u002F\u002F Assuing using sRGB typed textures this should not be needed.\nfloat ToLinear1(float c){return(c\u003C=0.04045)?c\u002F12.92:pow((c+0.055)\u002F1.055,2.4);}\nvec3 ToLinear(vec3 c){return vec3(ToLinear1(c.r),ToLinear1(c.g),ToLinear1(c.b));}\n\n\u002F\u002F Linear to sRGB.\n\u002F\u002F Assuing using sRGB typed textures this should not be needed.\nfloat ToSrgb1(float c){return(c\u003C0.0031308?c*12.92:1.055*pow(c,0.41666)-0.055);}\nvec3 ToSrgb(vec3 c){return vec3(ToSrgb1(c.r),ToSrgb1(c.g),ToSrgb1(c.b));}\n\n\u002F\u002F Nearest emulated sample given floating point position and texel offset.\n\u002F\u002F Also zero's off screen.\nvec3 Fetch(vec2 pos,vec2 off){\n pos=floor(pos*res+off)\u002Fres;\n if(max(abs(pos.x-0.5),abs(pos.y-0.5))\u003E0.5)return vec3(0.0,0.0,0.0);\n return ToLinear(texture2D(tDiffuse,pos.xy,-16.0).rgb);}\n\n\u002F\u002F Distance in emulated pixels to nearest texel.\nvec2 Dist(vec2 pos){pos=pos*res;return -((pos-floor(pos))-vec2(0.5));}\n \n\u002F\u002F 1D Gaussian.\nfloat Gaus(float pos,float scale){return exp2(scale*pos*pos);}\n\n\u002F\u002F 3-tap Gaussian filter along horz line.\nvec3 Horz3(vec2 pos,float off){\n vec3 b=Fetch(pos,vec2(-1.0,off));\n vec3 c=Fetch(pos,vec2( 0.0,off));\n vec3 d=Fetch(pos,vec2( 1.0,off));\n float dst=Dist(pos).x;\n \u002F\u002F Convert distance to weight.\n float scale=hardPix;\n float wb=Gaus(dst-1.0,scale);\n float wc=Gaus(dst+0.0,scale);\n float wd=Gaus(dst+1.0,scale);\n \u002F\u002F Return filtered sample.\n return (d*wd)\u002F(wb+wc+wd);}\n\n\u002F\u002F 5-tap Gaussian filter along horz line.\nvec3 Horz5(vec2 pos,float off){\n vec3 a=Fetch(pos,vec2(-2.0,off));\n vec3 b=Fetch(pos,vec2(-1.0,off));\n vec3 c=Fetch(pos,vec2( 0.0,off));\n vec3 d=Fetch(pos,vec2( 1.0,off));\n vec3 e=Fetch(pos,vec2( 2.0,off));\n float dst=Dist(pos).x;\n \u002F\u002F Convert distance to weight.\n float scale=hardPix;\n float wa=Gaus(dst-2.0,scale);\n float wb=Gaus(dst-1.0,scale);\n float wc=Gaus(dst+0.0,scale);\n float wd=Gaus(dst+1.0,scale);\n float we=Gaus(dst+2.0,scale);\n \u002F\u002F Return filtered sample.\n return (a*wa+b*wb+c*wc+d*wd+e*we)\u002F(wa+wb+wc+wd+we);}\n\n\u002F\u002F Return scanline weight.\nfloat Scan(vec2 pos,float off){\n float dst=Dist(pos).y;\n return Gaus(dst+off,hardScan);}\n\n\u002F\u002F Allow nearest three lines to effect pixel.\nvec3 Tri(vec2 pos){\n vec3 a=Horz3(pos,-1.0);\n vec3 b=Horz5(pos, 0.0);\n vec3 c=Horz3(pos, 1.0);\n float wa=Scan(pos,-1.0);\n float wb=Scan(pos, 0.0);\n float wc=Scan(pos, 1.0);\n return a*wa+b*wb+c*wc;}\n\n\u002F\u002F Distortion of scanlines, and end of screen alpha.\nvec2 Warp(vec2 pos){\n pos=pos*2.0-1.0; \n pos*=vec2(1.0+(pos.y*pos.y)*warp.x,1.0+(pos.x*pos.x)*warp.y);\n return pos*0.5+0.5;}\n\n\u002F\u002F Shadow mask.\nvec3 Mask(vec2 pos){\n pos.x+=pos.y*3.0;\n\n vec3 mask=vec3(maskDark,maskDark,maskDark);\n pos.x=fract(pos.x\u002F6.0);\n\n\n\n if(pos.x\u003C0.333333333)mask.r=maskLight;\n else if(pos.x\u003C0.66666666)mask.g=maskLight;\n else mask.b=maskLight;\n return mask;} \n\n\u002F\u002F Draw dividing bars.\nfloat Bar(float pos,float bar){pos-=bar;return pos*pos\u003C4.0?0.0:1.0;}\n\n\u002F\u002F Entry.\nvoid main(){\n vec2 fragCoord = vUv * iResolution;\n\n vec2 pos=Warp(fragCoord.xy\u002FiResolution.xy+vec2(0.0,0.0));\n\n gl_FragColor.rgb=Tri(pos)*Mask(fragCoord.xy);\n\n gl_FragColor.a=1.0; \n gl_FragColor.rgb=ToSrgb(gl_FragColor.rgb);\n\n }\n\n \n\n\n `,\n}\n","id":"e48e72d8-8b45-4b79-8f89-ada705a69cee","is_binary":false,"title":"CrtShader.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ByuxHTiBj-Y","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"SJMBpiBsWt"},{"code":"export const DitherShader = {\n uniforms: {\n tDiffuse: { type: \"t\", value: null },\n tDither: { type: \"t\", value: null },\n tPalette: { type: \"t\", value: null },\n\n u_bit_depth: { type: \"i\", value: 32 },\n u_contrast: { value: 1 },\n u_offset: { value: 0 },\n u_dither_size: { type: \"i\", value: 2 },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n \u002F* \n This shader is under MIT license. Feel free to use, improve and \n change this shader according to your needs and consider sharing \n the modified result to godotshaders.com.\n *\u002F\n\n uniform sampler2D tDiffuse;\n\t\tvarying vec2 vUv;\n\n uniform sampler2D tDither;\n uniform sampler2D tPalette;\n\n uniform int u_bit_depth;\n uniform float u_contrast;\n uniform float u_offset;\n uniform int u_dither_size;\n\n void main() \n {\n \u002F\u002F sample the screen texture at the desired output resolution (according to u_dither_size)\n \u002F\u002F this will effectively pixelate the resulting output\n vec2 screen_size = vec2(textureSize(tDiffuse, 0)) \u002F float(u_dither_size);\n vec2 screen_sample_uv = floor(vUv * screen_size) \u002F screen_size;\n vec3 screen_col = texture(tDiffuse, screen_sample_uv).rgb;\n \n \u002F\u002F calculate pixel luminosity (https:\u002F\u002Fstackoverflow.com\u002Fquestions\u002F596216\u002Fformula-to-determine-brightness-of-rgb-color)\n float lum = (screen_col.r * 0.299) + (screen_col.g * 0.587) + (screen_col.b * 0.114);\n \n \u002F\u002F adjust with contrast and offset parameters\n float contrast = u_contrast;\n lum = (lum - 0.5 + u_offset) * contrast + 0.5;\n lum = clamp(lum, 0.0, 1.0);\n \n \u002F\u002F reduce luminosity bit depth to give a more banded visual if desired\t\n float bits = float(u_bit_depth);\n lum = floor(lum * bits) \u002F bits;\n \n \u002F\u002F to support multicolour palettes, we want to dither between the two colours on the palette\n \u002F\u002F which are adjacent to the current pixel luminosity.\n \u002F\u002F to do this, we need to determine which 'band' lum falls into, calculate the upper and lower\n \u002F\u002F bound of that band, then later we will use the dither texture to pick either the upper or \n \u002F\u002F lower colour.\n \n \u002F\u002F get the palette texture size mapped so it is 1px high (so the x value however many colour bands there are)\n ivec2 col_size = textureSize(tPalette, 0);\n ivec2 test = col_size;\n col_size \u002F= col_size.y;\n \n float col_x = float(col_size.x) - 1.0; \u002F\u002F colour boundaries is 1 less than the number of colour bands\n float col_texel_size = 1.0 \u002F col_x; \u002F\u002F the size of one colour boundary\n \n lum = max(lum - 0.00001, 0.0); \u002F\u002F makes sure our floor calculation below behaves when lum == 1.0\n float lum_lower = floor(lum * col_x) * col_texel_size;\n float lum_upper = (floor(lum * col_x) + 1.0) * col_texel_size;\n float lum_scaled = lum * col_x - floor(lum * col_x); \u002F\u002F calculates where lum lies between the upper and lower bound\n \n \u002F\u002F map the dither texture onto the screen. there are better ways of doing this that makes the dither pattern 'stick'\n \u002F\u002F with objects in the 3D world, instead of being mapped onto the screen. see lucas pope's details posts on how he \n \u002F\u002F achieved this in Obra Dinn: https:\u002F\u002Fforums.tigsource.com\u002Findex.php?topic=40832.msg1363742#msg1363742\n ivec2 noise_size = textureSize(tDither, 0);\n vec2 inv_noise_size = vec2(1.0 \u002F float(noise_size.x), 1.0 \u002F float(noise_size.y));\n vec2 noise_uv = vUv * inv_noise_size * vec2(float(screen_size.x), float(screen_size.y));\n float threshold = texture(tDither, noise_uv).r;\n \n \u002F\u002F adjust the dither slightly so min and max aren't quite at 0.0 and 1.0\n \u002F\u002F otherwise we wouldn't get fullly dark and fully light dither patterns at lum 0.0 and 1.0\n threshold = threshold * 0.99 + 0.005;\n \n \u002F\u002F the lower lum_scaled is, the fewer pixels will be below the dither threshold, and thus will use the lower bound colour,\n \u002F\u002F and vice-versa\n float ramp_val = lum_scaled \u003C threshold ? 0.0f : 1.0f;\n \u002F\u002F sample at the lower bound colour if ramp_val is 0.0, upper bound colour if 1.0\n float col_sample = mix(lum_lower, lum_upper, ramp_val);\n vec3 final_col = texture(tDiffuse, vec2(col_sample, 0.5)).rgb;\n \n \u002F\u002F return the final colour!\n gl_FragColor = vec4(final_col, 1.0);\n }\n `,\n}\n","id":"a2f11e4e-9a25-497d-8b54-ff3eac976c87","is_binary":false,"title":"DitherShader.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rkKxSTjHjbt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"SJMBpiBsWt"},{"code":"export const NoiseShader = {\n uniforms: {\n tDiffuse: { type: \"t\", value: null },\n amount: { value: 0.08 },\n speed: { value: 0.5 },\n time: { value: 0 },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n uniform sampler2D tDiffuse;\n\t\tuniform float amount;\n\t\tuniform float speed;\n\t\tuniform float time;\n\n\t\tvarying vec2 vUv;\n\n\n\t\tfloat random(vec2 n, float offset ){\n\t\t\treturn .5 - fract(sin(dot(n.xy + vec2( offset, 0. ), vec2(12.9898, 78.233)))* 43758.5453);\n\t\t}\n\n\t\tvoid main() {\n\t\t\tvec3 color = texture2D(tDiffuse, vUv).rgb;\n\t\t\tcolor += vec3( amount * random( vUv, .0001 * speed * time ) );\n\t\t\tgl_FragColor = vec4(color,1.0);\n\t\t}\n `,\n}\n","id":"3077cbb2-d478-4d33-9163-0a9a0ada289f","is_binary":false,"title":"NoiseShader.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"H1qgS6iSsbt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"SJMBpiBsWt"},{"code":"export const RGBShiftShader = {\n uniforms: {\n tDiffuse: { value: null },\n amount: { value: 0.02 },\n },\n\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n\t\tuniform sampler2D tDiffuse;\n\t\tuniform float amount;\n\n\t\tvarying vec2 vUv;\n\n\t\tvoid main() {\n\n\t\t\tvec2 offset = vec2( -0.1, 0.) * amount;\n\t\t\tvec4 cr = texture2D(tDiffuse, vUv + offset);\n\t\t\tvec4 cga = texture2D(tDiffuse, vUv);\n\t\t\tvec4 cb = texture2D(tDiffuse, vUv - offset);\n\t\t\tgl_FragColor = vec4(cr.r, cga.g, cb.b, cga.a);\n\t\t}\n `,\n}\n","id":"b2099871-4d90-4cd1-9461-1bc4e3e48bfa","is_binary":false,"title":"RGBShiftShader.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BJixrasBoWF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"SJMBpiBsWt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer2tile16.png","id":"ff000aad-c43a-41e7-bfae-5b8452117862","is_binary":true,"title":"bayer2tile16.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkcZSpjHoWF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_mono.png","id":"28df9561-4a5a-4e11-8ed1-e6d7154e0a6e","is_binary":true,"title":"palette_mono.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkoZH6sSsWK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_eeve.png","id":"e49d7d64-1ca0-40b0-8b41-8a40bca48930","is_binary":true,"title":"palette_eeve.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"Hyn-BajHjbt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer4tile8.png","id":"3ebf2eab-b4b0-487b-9a47-fa1fb76e2aef","is_binary":true,"title":"bayer4tile8.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyTWHaoSi-F","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fpalette_hollow.png","id":"14854543-b58c-45ab-b9eb-19ddd69b0e59","is_binary":true,"title":"palette_hollow.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SkC-rTiBsbK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"\u003C!DOCTYPE html\u003E\n\u003Chtml lang=\"en\"\u003E\n\n\u003Chead\u003E\n\t\u003Cmeta charset=\"utf-8\"\u003E\n\t\u003Cmeta name=\"viewport\" content=\"width=device-width, initial-scale=1, shrink-to-fit=no\"\u003E\n\t\u003Cmeta name=\"theme-color\" content=\"#000000\"\u003E\n\t\u003C!--\n manifest.json provides metadata used when your web app is added to the\n homescreen on Android. See https:\u002F\u002Fdevelopers.google.com\u002Fweb\u002Ffundamentals\u002Fengage-and-retain\u002Fweb-app-manifest\u002F\n --\u003E\n\t\u003Clink rel=\"manifest\" href=\"%PUBLIC_URL%\u002Fmanifest.json\"\u003E\n\t\u003Clink rel=\"shortcut icon\" href=\"%PUBLIC_URL%\u002Ffavicon.ico\"\u003E\n\t\u003C!--\n Notice the use of %PUBLIC_URL% in the tags above.\n It will be replaced with the URL of the `public` folder during the build.\n Only files inside the `public` folder can be referenced from the HTML.\n\n Unlike \"\u002Ffavicon.ico\" or \"favicon.ico\", \"%PUBLIC_URL%\u002Ffavicon.ico\" will\n work correctly both with client-side routing and a non-root public URL.\n Learn how to configure a non-root public URL by running `npm run build`.\n --\u003E\n\t\u003Ctitle\u003EReact App\u003C\u002Ftitle\u003E\n\u003C\u002Fhead\u003E\n\n\u003Cbody\u003E\n\t\u003Cnoscript\u003E\n\t\tYou need to enable JavaScript to run this app.\n\t\u003C\u002Fnoscript\u003E\n\t\u003Cdiv id=\"root\"\u003E\u003C\u002Fdiv\u003E\n\t\u003C!--\n This HTML file is a template.\n If you open it directly in the browser, you will see an empty page.\n\n You can add webfonts, meta tags, or analytics to this file.\n The build step will place the bundled scripts into the \u003Cbody\u003E tag.\n\n To begin the development, run `npm start` or `yarn start`.\n To create a production bundle, use `npm run build` or `yarn build`.\n --\u003E\n\u003C\u002Fbody\u003E\n\n\u003C\u002Fhtml\u003E","id":"e9ee50d1-b015-49e1-98d0-631ec7fe5188","is_binary":false,"title":"index.html","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SJ3rTiBsWY","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"S1rasroZY"},{"code":"{\n \"infiniteLoopProtection\": false,\n \"hardReloadOnChange\": false,\n \"view\": \"browser\"\n}\n","id":"d8ec91c9-b4af-4c5c-a5ce-fa2801ad6ae1","is_binary":false,"title":"sandbox.config.json","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rJ6S6oHsbF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer16tile2.png","id":"6ae93443-e83c-4f9f-a62f-bd44bd6fb48b","is_binary":true,"title":"bayer16tile2.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rk_-rTsSiWt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Frawcdn.githack.com\u002Fbfollington\u002Fuse-animation\u002F55eb38e591426d628c0ee6fe4a2494a3e42cedfe\u002Fsrc\u002Fresources\u002Fbayer8tile4.png","id":"dd45798b-fa5d-491f-87d2-a2534eaf6228","is_binary":true,"title":"bayer8tile4.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rkK-S6sHsWt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"import * as THREE from \"three\"\nimport React, { useRef, useMemo, useEffect, useState } from \"react\"\nimport SimplexNoise from \"simplex-noise\"\nimport { useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport bg from \".\u002Fresources\u002Fseamless8.png\"\nimport \".\u002Fmaterials\u002FShinyMaterial\"\nimport { Html } from \"@react-three\u002Fdrei\"\n\nvar RIBBON_LEN = 100 \u002F\u002Fnumber of spine point\n\nfunction createRibbonGeom() {\n \u002F\u002Fmake geometry, faces & colors for a ribbon\n var i\n\n const positions = []\n const normals = []\n const uvs = []\n const vertexColors = []\n\n \u002F\u002Fcreate verts + colors\n for (i = 0; i \u003C RIBBON_LEN; i++) {\n positions.push(new THREE.Vector3(i, i, 0))\n positions.push(new THREE.Vector3(i, i + 10, 0))\n vertexColors.push(new THREE.Color())\n vertexColors.push(new THREE.Color())\n }\n\n const indices = []\n\n \u002F\u002Fcreate faces\n for (i = 0; i \u003C RIBBON_LEN - 1; i++) {\n \u002F\u002F geom.faces.push(new THREE.Face3(i * 2, i * 2 + 1, i * 2 + 2))\n \u002F\u002F geom.faces.push(new THREE.Face3(i * 2 + 1, i * 2 + 3, i * 2 + 2))\n\n indices.push(i * 2)\n indices.push(i * 2 + 1)\n indices.push(i * 2 + 2)\n\n indices.push(i * 2 + 1)\n indices.push(i * 2 + 3)\n indices.push(i * 2 + 2)\n }\n\n return { positions, indices }\n}\n\nfunction lerp(v0, v1, t) {\n return v0 * (1 - t) + v1 * t\n}\n\nconst noise = new SimplexNoise()\n\nfunction getNoiseAngle(time, noiseId, zOffset) {\n \u002F\u002F return noise.noise3d( noiseTime, noiseId, zOffset ) * Math.PI*2;\n \u002F\u002F console.log(time, noiseId, zOffset)\n return noise.noise3D(time, noiseId, zOffset) * Math.PI * 2\n \u002F\u002F return Math.PI * 2 * (Math.sin(time) + 1)\n}\n\nexport default function Ribbon({ id = 1, color = \"red\" }) {\n const material = useRef()\n const mesh = useRef()\n const [texture] = useLoader(THREE.TextureLoader, [bg])\n texture.wrapS = texture.wrapT = THREE.RepeatWrapping\n\n const up = new THREE.Vector3(1, 0, 0)\n const direction = useRef(new THREE.Vector3(0, 0, 0))\n const normal = useRef(new THREE.Vector3(0, 0, 0))\n\n const arm1 = useRef(new THREE.Vector3())\n const arm2 = useRef(new THREE.Vector3())\n const arm3 = useRef(new THREE.Vector3())\n\n const arm1T = useRef(new THREE.Vector3())\n const arm2T = useRef(new THREE.Vector3())\n const arm3T = useRef(new THREE.Vector3())\n\n const xAxis = new THREE.Vector3(1, 0, 0)\n const yAxis = new THREE.Vector3(0, 1, 0)\n const zAxis = new THREE.Vector3(0, 0, 1)\n\n const noiseId = useRef(-1)\n const ribbonWidth = useRef(-1)\n\n const head = useRef(new THREE.Vector3(0, 0, 0))\n const prev = useRef(new THREE.Vector3(0, 0, 0))\n\n const geometry = useRef()\n\n const [debug, setDebug] = useState(\"\")\n\n const onInit = () =\u003E {\n noiseId.current = id \u002F 300\n\n \u002F\u002F generate thiccness\n ribbonWidth.current = Math.random() * 1.5 + 0.25\n if (Math.random() \u003C 0.2) {\n ribbonWidth.current = 4\n }\n\n \u002F\u002Fhead is the thing that moves, prev follows behind\n head.current = new THREE.Vector3(0, 0, 0)\n prev.current = new THREE.Vector3(0, 0, 0)\n\n \u002F\u002F construct mesh\n const geo = createRibbonGeom()\n geometry.current.setFromPoints(geo.positions)\n geometry.current.setIndex(geo.indices)\n geometry.current.computeVertexNormals()\n\n \u002F\u002Fmovement arm vectors\n var armLenFac = 1.7\n arm1.current = new THREE.Vector3(8 * armLenFac, 0, 0)\n arm2.current = new THREE.Vector3(4 * armLenFac, 0, 0)\n arm3.current = new THREE.Vector3(1.5 * armLenFac, 0, 0)\n\n onReset()\n }\n\n const onReset = () =\u003E {\n var i\n\n \u002F\u002Freset prev position\n prev.current.copy(head.current)\n\n \u002F\u002Freset mesh geom\n for (i = 0; i \u003C RIBBON_LEN; i++) {\n geometry.current.attributes.position.array[i * 6] = head.current.x\n geometry.current.attributes.position.array[i * 6 + 1] = head.current.y\n geometry.current.attributes.position.array[i * 6 + 2] = head.current.z\n geometry.current.attributes.position.array[i * 6 + 3] = head.current.x\n geometry.current.attributes.position.array[i * 6 + 4] = head.current.y\n geometry.current.attributes.position.array[i * 6 + 5] = head.current.z\n }\n\n \u002F\u002Finit colors for this ribbon\n \u002F\u002Fhue is set by noiseId\n let hue1 = (noiseId + Math.random() * 0.01) % 2\n let hue2 = (noiseId + Math.random() * 0.01) % 2\n\n if (Math.random() \u003C 0.1) {\n hue1 = Math.random()\n }\n if (Math.random() \u003C 0.1) {\n hue2 = Math.random()\n }\n\n var sat = Math.random() * 0.4 + 0.6\n var lightness = Math.random() * 0.4 + 0.2\n\n var col = new THREE.Color()\n\n \u002F\u002F TODO: fix this so the ribbons change colour along their length\n for (i = 0; i \u003C RIBBON_LEN - 1; i++) {\n \u002F\u002Fadd lightness gradient based on spine position\n col.setHSL(lerp(hue1, hue2, i \u002F RIBBON_LEN), sat, lightness)\n\n \u002F\u002F this.meshGeom.faces[i*2].color.copy(col);\n \u002F\u002F this.meshGeom.faces[i*2+1].color.copy(col);\n }\n\n geometry.current.verticesNeedUpdate = true\n geometry.current.colorsNeedUpdate = true\n }\n\n useFrame(({ clock }, delta) =\u003E {\n \u002F\u002F material.current.time += delta + Math.sin(clock.elapsedTime \u002F 10) \u002F 1000\n mesh.current.rotation.x = Math.sin(clock.elapsedTime) * (Math.PI \u002F 20)\n mesh.current.rotation.y = Math.cos(clock.elapsedTime) * (Math.PI \u002F 20)\n mesh.current.scale.y = 0.9 + 0.1 * Math.sin(clock.elapsedTime)\n mesh.current.scale.x = 0.9 + 0.1 * Math.cos(clock.elapsedTime)\n\n prev.current.copy(head.current)\n\n \u002F\u002Fmove arms\n arm1T.current.copy(arm1.current)\n arm2T.current.copy(arm2.current)\n arm3T.current.copy(arm3.current)\n\n const t = clock.elapsedTime \u002F 5.0\n\n arm1T.current.applyAxisAngle(zAxis, getNoiseAngle(t, noiseId.current, 0))\n arm1T.current.applyAxisAngle(yAxis, getNoiseAngle(t, noiseId.current, 20))\n\n arm2T.current.applyAxisAngle(zAxis, getNoiseAngle(t, noiseId.current, 50))\n arm2T.current.applyAxisAngle(xAxis, getNoiseAngle(t, noiseId.current, 70))\n\n arm3T.current.applyAxisAngle(xAxis, getNoiseAngle(t, noiseId.current, 100))\n arm3T.current.applyAxisAngle(yAxis, getNoiseAngle(t, noiseId.current, 150))\n\n \u002F\u002FMOVE HEAD\n head.current.copy(arm1T.current).add(arm2T.current).add(arm3T.current)\n\n \u002F\u002Fcalc new L + R edge positions from direction between head and prev\n direction.current.subVectors(head.current, prev.current).normalize()\n normal.current.crossVectors(direction.current, up).normalize()\n normal.current.multiplyScalar(ribbonWidth.current)\n\n \u002F\u002Fshift each 2 verts down one posn\n \u002F\u002Fe.g. copy verts (0,1) -\u003E (2,3)\n const verts = geometry.current.attributes.position.array\n for (var i = RIBBON_LEN - 1; i \u003E 0; i--) {\n verts[i * 6] = verts[(i - 1) * 6]\n verts[i * 6 + 1] = verts[(i - 1) * 6 + 1]\n verts[i * 6 + 2] = verts[(i - 1) * 6 + 2]\n verts[i * 6 + 3] = verts[(i - 1) * 6 + 3]\n verts[i * 6 + 4] = verts[(i - 1) * 6 + 4]\n verts[i * 6 + 5] = verts[(i - 1) * 6 + 5]\n }\n\n \u002F\u002Fpopulate 1st 2 verts with left and right normalHelper\n const v0 = new THREE.Vector3().copy(head.current).add(normal.current)\n verts[0] = v0.x\n verts[1] = v0.y\n verts[2] = v0.z\n const v1 = new THREE.Vector3().copy(head.current).sub(normal.current)\n verts[3] = v1.x\n verts[4] = v1.y\n verts[5] = v1.z\n\n geometry.current.attributes.position.needsUpdate = true\n geometry.current.computeFaceNormals()\n geometry.current.computeVertexNormals()\n\n \u002F\u002F setDebug(`${head.current.x}, ${head.current.y}, ${head.current.z}`)\n })\n\n useEffect(() =\u003E {\n onInit()\n }, [onInit])\n\n return (\n \u003C\u003E\n \u003Cmesh position={[0, 0, 0]} ref={mesh}\u003E\n {\u002F* \u003CshinyMaterial ref={material} attach=\"material\" noiseTexture={texture} \u002F\u003E *\u002F}\n \u003CmeshStandardMaterial attach=\"material\" color={color} side={THREE.DoubleSide} \u002F\u003E\n \u003CbufferGeometry attach=\"geometry\" ref={geometry} \u002F\u003E\n {\u002F* \u003CmeshPhongMaterial\n attach=\"material\"\n side={THREE.DoubleSide}\n vertexColors={THREE.FaceColors}\n color={\"#FFFFFF\"}\n shininess={30}\n specular={0x50473b}\n frustumCulled={false}\n \u002F\u003E *\u002F}\n \u003C\u002Fmesh\u003E\n {\u002F* \u003CHtml\u003E\n \u003Cp\u003E{debug}\u003C\u002Fp\u003E\n \u003C\u002FHtml\u003E *\u002F}\n \u003C\u002F\u003E\n )\n}\n","id":"b1813347-fd07-4fab-9dc3-6afd2ed22240","is_binary":false,"title":"Ribbon.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyfxSTiSo-K","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"export const DIMENSIONS = {\n width: window.innerWidth,\n height: window.innerHeight,\n}\n","id":"563f71d6-316d-4880-a971-b3cfb69e8804","is_binary":false,"title":"const.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"ByNlB6orobF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"export const SpaceShader = {\n uniforms: {\n tDiffuse: { value: null },\n time: { value: 0 },\n opacity: { value: 0.15 },\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n uniform sampler2D tDiffuse;\n\t\tuniform float time;\n\t\tuniform float opacity;\n\n\t\tvarying vec2 vUv;\n\n\t\t#define PI 3.14159265359\n\n\t\tmat2 rotate2d(float _angle){\n\t\t\treturn mat2(cos(_angle),-sin(_angle),\n\t\t\t\tsin(_angle),cos(_angle));\n\t\t}\n\n\t\tfloat hash( vec2 p )\n\t\t{\n\t\t\tfloat h = dot(p,vec2(127.1,311.7));\n\t\t\treturn -1.0 + 2.0*fract(sin(h)*43758.5453123);\n\t\t}\n\n\t\t\u002F\u002Fcheap value noise https:\u002F\u002Fwww.shadertoy.com\u002Fview\u002Flsf3WH\n\t\tfloat noise( in vec2 p )\n\t\t{\n\t\t\tvec2 i = floor( p );\n\t\t\tvec2 f = fract( p );\n\n\t\t\tvec2 u = f*f*(3.0-2.0*f);\n\n\t\t\treturn mix( mix( hash( i + vec2(0.0,0.0) ),\n\t\t\t hash( i + vec2(1.0,0.0) ), u.x),\n\t\t\tmix( hash( i + vec2(0.0,1.0) ),\n\t\t\t hash( i + vec2(1.0,1.0) ), u.x), u.y);\n\t\t}\n\n\t\tvoid main( )\n\t\t{\n\n\t\t\tvec2 uv = vUv;\n\t\t\tvec3 orig = texture2D(tDiffuse, uv).rgb;\n\n\t\t\t\u002F\u002Frotate uv space\n\t\t\tuv -= 0.5;\n\t\t\tuv = rotate2d( fract(time\u002F40.0)*2.0*PI ) * uv;\n\t\t\tuv += 0.5;\n\n\t\t\t\u002F\u002F4 corner grad blue\u002Fpink\n\t\t\tvec3 col1 = vec3(uv,1.0);\n\n\t\t\t\u002F\u002F4 corner grad pink\u002Forang\n\t\t\tvec3 col2 = vec3(1,uv);\n\n\t\t\t\u002F\u002Fmix over time\n\t\t\tfloat t = abs(2.0 * fract(time\u002F10.0) - 1.0);\n\t\t\tvec3 col = mix(col1,col2,t);\n\n\t\t\t\u002F\u002Fnoise clouds additive blend\n\t\t\t\u002F\u002Ff returns in -1 - 1 range'\n\t\t\t\u002F\u002Fsimple 1 octave\n\t\t\tfloat tn = time\u002F5.0;\n\t\t\tvec2 uvn = uv * 2.; \u002F\u002F noise scale\n\t\t\tfloat f = noise( uvn + tn);\n\t\t\tf *= 0.8;\n\t\t\tcol += f;\n\t\t\t\u002F\u002Fdarker\n\t\t\tcol -= 0.4;\n\n\t\t\tgl_FragColor = vec4(orig + col * opacity, 1.0);\n\t\t}\n `,\n}\n","id":"a8db7599-e85d-41d7-8af2-dc475701950c","is_binary":false,"title":"SpaceShader.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HkneBTjSibt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"SJMBpiBsWt"},{"code":"export const TiltShiftShader = {\n uniforms: {\n tDiffuse: { type: \"t\", value: null },\n focusPos: { type: \"f\", value: 0.5 }, \u002F\u002F focus Y position 0 = bottom, 1 = top\n range: { type: \"f\", value: 0.5 }, \u002F\u002F distance to fade between none and full blur\n offset: { type: \"f\", value: 0.02 }, \u002F\u002F blur distance\n strength: { type: \"f\", value: 0.9 }, \u002F\u002F 0 = passthru, 1 = full\n },\n vertexShader: \u002F*glsl*\u002F `\n varying vec2 vUv;\n\n\t\tvoid main() {\n\t\t\tvUv = uv;\n\t\t\tgl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );\n\t\t}\n `,\n fragmentShader: \u002F*glsl*\u002F `\n uniform sampler2D tDiffuse;\n\t\tuniform float focusPos;\n\t\tuniform float offset;\n\t\tuniform float range;\n\t\tuniform float strength;\n\n\t\tvarying vec2 vUv;\n\n\t\tvoid main() {\n\n\t\t\tvec4 orig = texture2D(tDiffuse, vUv);\n\n\t\t\tvec2 uv = vUv;\n\n\t\t\tfloat tiltAmt = 1.0 - smoothstep(focusPos - range,focusPos,uv.y) + smoothstep(focusPos,focusPos + range,uv.y);\n\n\t\t\t\u002F\u002Fblur image\n\t\t\tvec4 blurImg = vec4( 0.0 );\n\n\t\t\tfloat offset9 = offset * 0.9;\n\t\t\tfloat offset7 = offset * 0.7;\n\t\t\tfloat offset4 = offset * 0.4;\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, 0.4 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, 0.37 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, 0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, 0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.40, 0.0 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, -0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, -0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, -0.37 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, -0.4 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, 0.37 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, 0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, 0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.4, 0.0 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, -0.15 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, -0.29 ) * offset );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, -0.37 ) * offset );\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, 0.37 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, 0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, -0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, -0.37 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.15, 0.37 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.37, 0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.37, -0.15 ) * offset9 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.15, -0.37 ) * offset9 );\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, 0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.40, 0.0 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, -0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, -0.4 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, 0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.4, 0.0 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, -0.29 ) * offset7 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, 0.4 ) * offset7 );\n\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, 0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.4, 0.0 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.29, -0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, -0.4 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, 0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.4, 0.0 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( -0.29, -0.29 ) * offset4 );\n\t\t\tblurImg += texture2D( tDiffuse, vUv.xy + vec2( 0.0, 0.4 ) * offset4 );\n\n\t\t\tblurImg = blurImg \u002F 41.0;\n\n\t\t\tgl_FragColor = mix(orig, blurImg, tiltAmt * strength);\n\n\t\t}\n `,\n}\n","id":"3e901775-4371-442b-b1ea-8bc04a4e592d","is_binary":false,"title":"TiltShiftShader.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"H1peBajBjZt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"SJMBpiBsWt"},{"code":"html,\nbody,\n#root {\n margin: 0;\n padding: 0;\n width: 100%;\n height: 100%;\n}\n","id":"8eb930e4-4dfa-4848-bb96-09e838ff86f7","is_binary":false,"title":"styles.css","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"BkClSajSjZY","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"\u002F*\n * Easing Functions - inspired from http:\u002F\u002Fgizma.com\u002Feasing\u002F\n * only considering the t value for the range [0, 1] =\u003E [0, 1]\n *\u002F\nconst EasingFunctions = {\n \u002F\u002F no easing, no acceleration\n linear: (t: number) =\u003E t,\n \u002F\u002F accelerating from zero velocity\n easeInQuad: (t: number) =\u003E t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutQuad: (t: number) =\u003E t * (2 - t),\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutQuad: (t: number) =\u003E (t \u003C 0.5 ? 2 * t * t : -1 + (4 - 2 * t) * t),\n \u002F\u002F accelerating from zero velocity\n easeInCubic: (t: number) =\u003E t * t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutCubic: (t: number) =\u003E --t * t * t + 1,\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutCubic: (t: number) =\u003E (t \u003C 0.5 ? 4 * t * t * t : (t - 1) * (2 * t - 2) * (2 * t - 2) + 1),\n \u002F\u002F accelerating from zero velocity\n easeInQuart: (t: number) =\u003E t * t * t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutQuart: (t: number) =\u003E 1 - --t * t * t * t,\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutQuart: (t: number) =\u003E (t \u003C 0.5 ? 8 * t * t * t * t : 1 - 8 * --t * t * t * t),\n \u002F\u002F accelerating from zero velocity\n easeInQuint: (t: number) =\u003E t * t * t * t * t,\n \u002F\u002F decelerating to zero velocity\n easeOutQuint: (t: number) =\u003E 1 + --t * t * t * t * t,\n \u002F\u002F acceleration until halfway, then deceleration\n easeInOutQuint: (t: number) =\u003E (t \u003C 0.5 ? 16 * t * t * t * t * t : 1 + 16 * --t * t * t * t * t),\n}\n\ntype E = typeof EasingFunctions\nexport type EaseFn = keyof E\n\nexport default EasingFunctions\n","id":"42c4f0c6-c2f4-4c5a-a974-7fb17476d2de","is_binary":false,"title":"ease.ts","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HJybrpiroWY","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"H17r6oBjbF"},{"code":"import { useFrame, useThree } from \"@react-three\u002Ffiber\"\nimport { useMemo, useRef } from \"react\"\nimport { Observable, of, Subject } from \"rxjs\"\nimport { expand, filter, map, share } from \"rxjs\u002Foperators\"\n\n\u002F\u002F Yoinked from https:\u002F\u002Fwww.learnrxjs.io\u002Flearn-rxjs\u002Frecipes\u002Fgameloop\n\nexport interface IFrameData {\n frameStartTime: number\n deltaTime: number\n}\n\n\u002F**\n * clampTo30FPS(frame)\n *\n * @param frame - {IFrameData} the frame data to check if we need to clamp to max of\n * 30fps time.\n *\n * If we get sporadic LONG frames (browser was navigated away or some other reason the frame takes a while) we want to throttle that so we don't JUMP ahead in any deltaTime calculations too far.\n *\u002F\nexport const clampTo30FPS = (frame: IFrameData) =\u003E {\n if (frame.deltaTime \u003E 1 \u002F 30) {\n frame.deltaTime = 1 \u002F 30\n }\n return frame\n}\n\nexport const clampTo60FPS = (frame: IFrameData) =\u003E {\n if (frame.deltaTime \u003E 1 \u002F 60) {\n frame.deltaTime = 1 \u002F 60\n }\n return frame\n}\n\n\u002F**\n * This function returns an observable that will emit the next frame once the\n * browser has returned an animation frame step. Given the previous frame it calculates\n * the delta time, and we also clamp it to 30FPS in case we get long frames.\n *\u002F\nconst calculateStep: (prevFrame?: IFrameData) =\u003E Observable\u003CIFrameData\u003E = (prevFrame?: IFrameData) =\u003E {\n return new Observable((observer) =\u003E {\n requestAnimationFrame((frameStartTime) =\u003E {\n console.log(\"raf\")\n\n \u002F\u002F Millis to seconds\n const deltaTime = prevFrame ? (frameStartTime - prevFrame.frameStartTime) \u002F 1000 : 0\n observer.next({\n frameStartTime,\n deltaTime,\n })\n })\n })\n}\n\n\u002F\u002F This is our core stream of frames. We use expand to recursively call the\n\u002F\u002F `calculateStep` function above that will give us each new Frame based on the\n\u002F\u002F window.requestAnimationFrame calls. Expand emits the value of the called functions\n\u002F\u002F returned observable, as well as recursively calling the function with that same\n\u002F\u002F emitted value. This works perfectly for calculating our frame steps because each step\n\u002F\u002F needs to know the lastStepFrameTime to calculate the next. We also only want to request\n\u002F\u002F a new frame once the currently requested frame has returned.\nexport const frames$ = of(undefined).pipe(\n expand((val) =\u003E calculateStep(val)),\n \u002F\u002F Expand emits the first value provided to it, and in this\n \u002F\u002F case we just want to ignore the undefined input frame\n filter((frame) =\u003E frame !== undefined),\n map((frame: IFrameData) =\u003E frame.deltaTime),\n share(),\n)\n\nexport const useFrameStream = () =\u003E {\n const s = useRef\u003CSubject\u003Cnumber\u003E\u003E(new Subject\u003Cnumber\u003E())\n useFrame(({ clock }) =\u003E {\n s.current.next(clock.getDelta())\n })\n return s\n}\n","id":"4bb4fc87-fab0-41f2-b726-16bd059e56b0","is_binary":false,"title":"frameStream.ts","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"SJxbSTsSjZF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"H17r6oBjbF"},{"code":"import { EaseFn, default as easing } from \".\u002Fease\"\nimport { concat, merge, Observable, of, Subject } from \"rxjs\"\nimport { delay, flatMap, map, share, switchMap, take, takeUntil, takeWhile, tap } from \"rxjs\u002Foperators\"\nimport * as React from \"react\"\nimport { useFrameStream } from \".\u002FframeStream\"\nimport { Animatable, Interpolator, mapInterpolator, ObservableSource } from \".\u002FuseAnimation\"\n\nexport const useAnimation = (source: ObservableSource, interpolator: Interpolator, duration: number, sink: (v: Animatable) =\u003E void) =\u003E {\n const underlying = React.useRef(source.value())\n\n const frames$ = useFrameStream()\n\n React.useEffect(() =\u003E {\n sink(underlying.current)\n\n const sub = source.changes\n .pipe(\n switchMap((v) =\u003E {\n const baseTime = Date.now()\n\n return concat(\n frames$.current.pipe(\n share(),\n map((dt) =\u003E (Date.now() - baseTime) \u002F duration),\n takeWhile((t) =\u003E t \u003C 1),\n ),\n of(1),\n ).pipe(map(mapInterpolator(interpolator, underlying.current, v).sample))\n }),\n )\n .subscribe((v) =\u003E {\n underlying.current = v\n sink(v)\n })\n\n return () =\u003E {\n sub.unsubscribe()\n }\n }, [duration, source, sink, interpolator, frames$])\n}\n","id":"eb4c7a06-4db1-477b-8f49-546b1641eaf5","is_binary":false,"title":"three.ts","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"rkbWrTjBs-K","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"H17r6oBjbF"},{"code":"import { EaseFn, default as easing } from \".\u002Fease\"\nimport { concat, merge, Observable, of, Subject } from \"rxjs\"\nimport { delay, flatMap, map, share, switchMap, take, takeUntil, takeWhile, tap } from \"rxjs\u002Foperators\"\nimport * as React from \"react\"\nimport { frames$, useFrameStream } from \".\u002FframeStream\"\n\nexport function interpolator(from: number, to: number, easeFn: EaseFn) {\n const e: (t: number) =\u003E number = easing[easeFn]\n return {\n end: to,\n sample: (t: number) =\u003E from + (to - from) * e(t),\n }\n}\n\nexport function mapInterpolator(i: Interpolator, from: number, to: number) {\n return {\n end: to,\n sample: (t: number) =\u003E from + (to - from) * i.sample(t),\n }\n}\n\nexport type Interpolator = ReturnType\u003Ctypeof interpolator\u003E\n\nexport function sequence(a: Interpolator, b: Interpolator) {\n const sample = (t: number) =\u003E {\n if (t \u003C 0.5) {\n return a.sample(t * 2)\n } else {\n return b.sample((t - 0.5) * 2)\n }\n }\n\n return {\n sample,\n end: b.end,\n }\n}\n\nfunction sequenceN(...i: Interpolator[]) {\n const n = i.length\n\n const sample = (t: number) =\u003E {\n const slice = 1 \u002F n\n const index = t % slice\n \u002F\u002F cut t into n segments\n \u002F\u002F pick correct interpolator based on t\n }\n}\n\nexport type Animatable = number\n\nexport type ObservableSource = {\n changes: Subject\u003CAnimatable\u003E\n value: () =\u003E Animatable\n swap: (swapFn: (v: Animatable) =\u003E Animatable) =\u003E void\n set: (v: Animatable) =\u003E void\n}\n\nexport const useObservable = (v: Animatable): ObservableSource =\u003E {\n const val = React.useRef(v)\n\n const subject = new Subject\u003CAnimatable\u003E()\n\n return {\n changes: subject,\n value: () =\u003E val.current,\n swap: (swapFn: (v: Animatable) =\u003E Animatable) =\u003E {\n val.current = swapFn(val.current)\n subject.next(val.current)\n },\n set: (v: Animatable) =\u003E {\n val.current = v\n subject.next(v)\n },\n }\n}\n\nexport const useObservableState = (v: Animatable): ObservableSource =\u003E {\n const [val, setVal] = React.useState(v)\n\n const subject = React.useRef(new Subject\u003CAnimatable\u003E())\n\n return React.useMemo(\n () =\u003E ({\n changes: subject.current,\n value: () =\u003E val,\n swap: (swapFn: (v: Animatable) =\u003E Animatable) =\u003E {\n subject.current.next(swapFn(val))\n setVal(swapFn)\n },\n set: (v: Animatable) =\u003E {\n subject.current.next(v)\n setVal(v)\n },\n }),\n [val, setVal],\n )\n}\n\n\u002F\u002F frames$.subscribe(console.log)\n\nexport const useAnimation = (source: ObservableSource, interpolator: Interpolator, duration: number, sink: (v: Animatable) =\u003E void) =\u003E {\n const underlying = React.useRef(source.value())\n\n React.useEffect(() =\u003E {\n sink(underlying.current)\n\n const sub = source.changes\n .pipe(\n switchMap((v) =\u003E {\n const baseTime = Date.now()\n\n return concat(\n frames$.pipe(\n share(),\n map((dt) =\u003E (Date.now() - baseTime) \u002F duration),\n takeWhile((t) =\u003E t \u003C 1),\n ),\n of(1),\n ).pipe(map(mapInterpolator(interpolator, underlying.current, v).sample))\n }),\n )\n .subscribe((v) =\u003E {\n underlying.current = v\n sink(v)\n })\n\n return () =\u003E {\n sub.unsubscribe()\n }\n }, [duration, source, sink, interpolator])\n}\n","id":"0def5be6-9156-4c3d-8e2f-8f699e299063","is_binary":false,"title":"useAnimation.ts","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-08-31T06:27:09","upload_id":null,"shortid":"HyMWHasSsZK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"H17r6oBjbF"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FfDYc-frog.png","id":"ee61e0a8-b69d-48eb-a8a9-555849a4f173","is_binary":true,"title":"frog.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-07T05:19:59","upload_id":null,"shortid":"HyDt7Z3Vt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"import * as THREE from \"three\"\nimport React, { useRef, useMemo, useEffect } from \"react\"\nimport { extend, useThree, useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport { EffectComposer } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FEffectComposer\"\nimport { ShaderPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FShaderPass\"\nimport { RenderPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FRenderPass\"\nimport { AfterimagePass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FAfterimagePass\"\nimport { GlitchPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FGlitchPass\"\nimport { UnrealBloomPass } from \"three\u002Fexamples\u002Fjsm\u002Fpostprocessing\u002FUnrealBloomPass\"\nimport { CrtShader } from \".\u002Fshaders\u002FCrtShader\"\nimport { TiltShiftShader } from \".\u002Fshaders\u002FTiltShiftShader\"\nimport { NoiseShader } from \".\u002Fshaders\u002FNoiseShader\"\nimport { DitherShader } from \".\u002Fshaders\u002FDitherShader\"\nimport { SpaceShader } from \".\u002Fshaders\u002FSpaceShader\"\nimport { RGBShiftShader } from \".\u002Fshaders\u002FRGBShiftShader\"\n\nimport palette from \".\u002Fresources\u002Fpalette_twilight.png\"\nimport dither from \".\u002Fresources\u002Fbayer16tile2.png\"\n\nextend({\n EffectComposer,\n ShaderPass,\n RenderPass,\n UnrealBloomPass,\n AfterimagePass,\n GlitchPass,\n})\n\nexport default function Effects() {\n const composer = useRef()\n const { scene, gl, size, camera } = useThree()\n const aspect = useMemo(() =\u003E new THREE.Vector2(512, 512), [])\n const noisePass = useRef()\n const spacePass = useRef()\n const ditherPass = useRef()\n\n const [paletteTex, ditherTex] = useLoader(THREE.TextureLoader, [palette, dither])\n ditherTex.wrapS = ditherTex.wrapT = THREE.RepeatWrapping\n console.log(paletteTex.image.width, paletteTex.image.height)\n console.log(ditherTex.image.width, ditherTex.image.height)\n\n useEffect(() =\u003E void composer.current.setSize(size.width, size.height), [size])\n useFrame(({ clock }) =\u003E {\n \u002F\u002F noisePass.current.uniforms.time.value = clock.elapsedTime\n \u002F\u002F spacePass.current.uniforms.time.value = clock.elapsedTime\n \u002F\u002F ditherPass.current.uniforms.tDither.value = ditherTex\n \u002F\u002F ditherPass.current.uniforms.tPalette.value = paletteTex\n composer.current.render()\n }, 1)\n\n return (\n \u003CeffectComposer ref={composer} args={[gl]}\u003E\n \u003CrenderPass attachArray=\"passes\" scene={scene} camera={camera} \u002F\u003E\n {\u002F* \u003CafterimagePass attachArray=\"passes\" args={[0.98]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n\n {\u002F* \u003CglitchPass\n attachArray=\"passes\"\n args={[0.1]}\n scene={scene}\n camera={camera}\n \u002F\u003E *\u002F}\n\n {\u002F* \u003CshaderPass attachArray=\"passes\" args={[TiltShiftShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass ref={noisePass} attachArray=\"passes\" args={[NoiseShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass ref={spacePass} attachArray=\"passes\" args={[SpaceShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass attachArray=\"passes\" args={[RGBShiftShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n {\u002F* \u003CunrealBloomPass attachArray=\"passes\" args={[aspect, 0.4, 0.5, 0]} \u002F\u003E *\u002F}\n {\u002F* \u003CshaderPass ref={ditherPass} attachArray=\"passes\" args={[DitherShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n\n {\u002F* \u003CshaderPass attachArray=\"passes\" args={[CrtShader]} scene={scene} camera={camera} \u002F\u003E *\u002F}\n \u003C\u002FeffectComposer\u003E\n )\n}\n\n\u002F\u002F const blendPass = new THREE.ShaderPass(THREE.BlendShader, \"tDiffuse1\");\n\u002F\u002F blendPass.uniforms[\"tDiffuse2\"].value = savePass.renderTarget.texture;\n\u002F\u002F blendPass.uniforms[\"mixRatio\"].value = 0.8;\n","id":"9a0925c8-30f2-4250-9af8-d78cff645d99","is_binary":false,"title":"Effects.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-07T05:00:17","upload_id":null,"shortid":"SkZgHairiWK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FmTMV-char.png","id":"1c3bbad5-3008-47cc-bfa4-b983a2169afa","is_binary":true,"title":"char.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-07T05:14:53","upload_id":null,"shortid":"rJrUMb2VK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002Feha5-thumbnail.png","id":"ec234532-ca1d-47a7-a860-9c92dbd4d24c","is_binary":true,"title":"thumbnail.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-07T05:59:04","upload_id":null,"shortid":"S1l23bnNK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"import { useFrame, useThree } from \"@react-three\u002Ffiber\"\nimport { useCallback, useEffect, useRef } from \"react\"\nimport { Vector3 } from \"three\"\nimport { DIMENSIONS } from \".\u002Fconst\"\n\nexport function useMousePositionZ(targetZ) {\n const { camera } = useThree()\n\n const mousePos = useRef(new Vector3(0, 0, 0))\n const vec = useRef(new Vector3(0, 0, 0))\n const projectedPos = useRef(new Vector3(0, 0, 0))\n\n useFrame(() =\u003E {\n vec.current.copy(mousePos.current)\n vec.current.unproject(camera)\n vec.current.sub(camera.position).normalize()\n\n var distance = (targetZ - camera.position.z) \u002F vec.current.z\n\n \u002F\u002F Note: we want to avoid new-ing here if we can\n projectedPos.current = new Vector3().copy(camera.position).add(vec.current.multiplyScalar(distance))\n })\n\n const onMouseMoved = useCallback(\n (event) =\u003E {\n mousePos.current.set((event.clientX \u002F DIMENSIONS.width) * 2 - 1, -(event.clientY \u002F DIMENSIONS.height) * 2 + 1, 0.5)\n },\n [mousePos],\n )\n\n useEffect(() =\u003E {\n document.addEventListener(\"mousemove\", onMouseMoved)\n\n return () =\u003E {\n document.removeEventListener(\"mousemove\", onMouseMoved)\n }\n }, [onMouseMoved])\n\n return { mouse: mousePos, projected: projectedPos }\n}\n\nexport function useMousePositionY(targetY) {\n const { camera } = useThree()\n\n const mousePos = useRef(new Vector3(0, 0, 0))\n const vec = useRef(new Vector3(0, 0, 0))\n const projectedPos = useRef(new Vector3(0, 0, 0))\n\n useFrame(() =\u003E {\n vec.current.copy(mousePos.current)\n vec.current.unproject(camera)\n vec.current.sub(camera.position).normalize()\n\n var distance = (targetY - camera.position.y) \u002F vec.current.y\n\n \u002F\u002F Note: we want to avoid new-ing here if we can\n projectedPos.current = new Vector3().copy(camera.position).add(vec.current.multiplyScalar(distance))\n })\n\n const onMouseMoved = useCallback(\n (event) =\u003E {\n mousePos.current.set((event.clientX \u002F DIMENSIONS.width) * 2 - 1, -(event.clientY \u002F DIMENSIONS.height) * 2 + 1, 0.5)\n },\n [mousePos],\n )\n\n useEffect(() =\u003E {\n document.addEventListener(\"mousemove\", onMouseMoved)\n\n return () =\u003E {\n document.removeEventListener(\"mousemove\", onMouseMoved)\n }\n }, [onMouseMoved])\n\n return { mouse: mousePos, projected: projectedPos }\n}\n","id":"86f71e81-9269-4dd2-964d-4837b71e1dd4","is_binary":false,"title":"useMousePosition.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-08T00:52:58","upload_id":null,"shortid":"5MBN8","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"import React from \"react\"\nimport ReactDOM from \"react-dom\"\nimport App from \".\u002FApp\"\nimport \".\u002Fstyles.css\"\n\nReactDOM.render(\n \u003CReact.StrictMode\u003E\n \u003CApp \u002F\u003E\n \u003C\u002FReact.StrictMode\u003E,\n document.getElementById(\"root\"),\n)\n","id":"ad978dba-6fe5-4654-9718-f4868113afda","is_binary":false,"title":"index.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-08T01:14:20","upload_id":null,"shortid":"HyBeBajBibt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"{ \"frames\": {\n \"char 0.png\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 1.png\": {\n \"frame\": { \"x\": 16, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 2.png\": {\n \"frame\": { \"x\": 32, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 3.png\": {\n \"frame\": { \"x\": 48, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 4.png\": {\n \"frame\": { \"x\": 64, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 5.png\": {\n \"frame\": { \"x\": 80, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 6.png\": {\n \"frame\": { \"x\": 96, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 7.png\": {\n \"frame\": { \"x\": 112, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n },\n \"char 8.png\": {\n \"frame\": { \"x\": 128, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 16, \"h\": 16 },\n \"sourceSize\": { \"w\": 16, \"h\": 16 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"char-sheet.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 144, \"h\": 16 },\n \"scale\": \"1\",\n \"frameTags\": [\n ],\n \"layers\": [\n { \"name\": \"Layer\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"e35a5735-67be-4f66-9228-c1d8e106f4ce","is_binary":false,"title":"char.json","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"HycdPgHIt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"{ \"frames\": {\n \"frog 0.png\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"sourceSize\": { \"w\": 17, \"h\": 16 },\n \"duration\": 100\n },\n \"frog 1.png\": {\n \"frame\": { \"x\": 17, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 17, \"h\": 16 },\n \"sourceSize\": { \"w\": 17, \"h\": 16 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"frog-sheet.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 34, \"h\": 16 },\n \"scale\": \"1\",\n \"frameTags\": [\n ],\n \"layers\": [\n { \"name\": \"Layer\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"d0867851-ab30-434d-bad5-c6dacf91fe99","is_binary":false,"title":"frog.json","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"SJg9uwxrIK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002F2mBW-char-sheet.png","id":"d83cdea3-ee0a-4434-a63e-8f78f23e12b8","is_binary":true,"title":"char-sheet.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"rkbcdDgHIF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002F3ykL-frog-sheet.png","id":"f71ae50d-ae62-4e89-bb86-f6e26490aef6","is_binary":true,"title":"frog-sheet.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T03:35:14","upload_id":null,"shortid":"HJf9OvxHUt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"{\n \"compilerOptions\": {\n \"target\": \"es5\",\n \"lib\": [\n \"es2017\",\n \"dom\",\n \"dom.iterable\",\n \"esnext\"\n ],\n \"allowJs\": true,\n \"skipLibCheck\": true,\n \"esModuleInterop\": true,\n \"allowSyntheticDefaultImports\": true,\n \"strict\": true,\n \"forceConsistentCasingInFileNames\": true,\n \"noFallthroughCasesInSwitch\": true,\n \"module\": \"esnext\",\n \"moduleResolution\": \"node\",\n \"resolveJsonModule\": true,\n \"isolatedModules\": true,\n \"noEmit\": true,\n \"jsx\": \"react-jsx\"\n },\n \"include\": [\n \"src\"\n ]\n}","id":"af3e59a8-94e8-413e-ba4f-f6323b334cc6","is_binary":false,"title":"tsconfig.json","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T04:41:19","upload_id":null,"shortid":"H1PbrpsSj-K","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"{ \"frames\": {\n \"bomber 0.aseprite\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 1.aseprite\": {\n \"frame\": { \"x\": 40, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 2.aseprite\": {\n \"frame\": { \"x\": 80, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 3.aseprite\": {\n \"frame\": { \"x\": 120, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 4.aseprite\": {\n \"frame\": { \"x\": 160, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 5.aseprite\": {\n \"frame\": { \"x\": 200, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 6.aseprite\": {\n \"frame\": { \"x\": 240, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 7.aseprite\": {\n \"frame\": { \"x\": 280, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 8.aseprite\": {\n \"frame\": { \"x\": 320, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 9.aseprite\": {\n \"frame\": { \"x\": 360, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 10.aseprite\": {\n \"frame\": { \"x\": 400, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 11.aseprite\": {\n \"frame\": { \"x\": 440, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 12.aseprite\": {\n \"frame\": { \"x\": 480, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 13.aseprite\": {\n \"frame\": { \"x\": 520, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 14.aseprite\": {\n \"frame\": { \"x\": 560, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 15.aseprite\": {\n \"frame\": { \"x\": 600, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 50\n },\n \"bomber 16.aseprite\": {\n \"frame\": { \"x\": 640, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 17.aseprite\": {\n \"frame\": { \"x\": 680, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 18.aseprite\": {\n \"frame\": { \"x\": 720, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 19.aseprite\": {\n \"frame\": { \"x\": 760, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 20.aseprite\": {\n \"frame\": { \"x\": 800, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 21.aseprite\": {\n \"frame\": { \"x\": 840, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 75\n },\n \"bomber 22.aseprite\": {\n \"frame\": { \"x\": 880, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n },\n \"bomber 23.aseprite\": {\n \"frame\": { \"x\": 920, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 40, \"h\": 40 },\n \"sourceSize\": { \"w\": 40, \"h\": 40 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"bomber.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 960, \"h\": 40 },\n \"scale\": \"1\",\n \"frameTags\": [\n { \"name\": \"idle\", \"from\": 0, \"to\": 5, \"direction\": \"forward\" },\n { \"name\": \"boom\", \"from\": 6, \"to\": 23, \"direction\": \"forward\" }\n ],\n \"layers\": [\n { \"name\": \"Layer 1\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"cdedd6c5-096c-4399-a9b1-6a250532f26a","is_binary":false,"title":"bomber.json","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T04:51:20","upload_id":null,"shortid":"r1JLYWH8t","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FUfe1-bomber.png","id":"b263a612-0ce9-4e80-9e21-0bf215c14cfe","is_binary":true,"title":"bomber.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T04:51:20","upload_id":null,"shortid":"r1g18FWHUK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"{ \"frames\": {\n \"smiley 0.ase\": {\n \"frame\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 1.ase\": {\n \"frame\": { \"x\": 36, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 2.ase\": {\n \"frame\": { \"x\": 72, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 3.ase\": {\n \"frame\": { \"x\": 108, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 4.ase\": {\n \"frame\": { \"x\": 144, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 5.ase\": {\n \"frame\": { \"x\": 180, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 6.ase\": {\n \"frame\": { \"x\": 216, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 7.ase\": {\n \"frame\": { \"x\": 252, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 8.ase\": {\n \"frame\": { \"x\": 288, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 9.ase\": {\n \"frame\": { \"x\": 324, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 10.ase\": {\n \"frame\": { \"x\": 360, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 11.ase\": {\n \"frame\": { \"x\": 396, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 12.ase\": {\n \"frame\": { \"x\": 432, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 13.ase\": {\n \"frame\": { \"x\": 468, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 14.ase\": {\n \"frame\": { \"x\": 504, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 15.ase\": {\n \"frame\": { \"x\": 540, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 16.ase\": {\n \"frame\": { \"x\": 576, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n },\n \"smiley 17.ase\": {\n \"frame\": { \"x\": 612, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"rotated\": false,\n \"trimmed\": false,\n \"spriteSourceSize\": { \"x\": 0, \"y\": 0, \"w\": 36, \"h\": 36 },\n \"sourceSize\": { \"w\": 36, \"h\": 36 },\n \"duration\": 100\n }\n },\n \"meta\": {\n \"app\": \"http:\u002F\u002Fwww.aseprite.org\u002F\",\n \"version\": \"1.2.15\",\n \"image\": \"smiley.png\",\n \"format\": \"RGBA8888\",\n \"size\": { \"w\": 648, \"h\": 36 },\n \"scale\": \"1\",\n \"frameTags\": [\n { \"name\": \"idle\", \"from\": 0, \"to\": 7, \"direction\": \"forward\" },\n { \"name\": \"wink\", \"from\": 8, \"to\": 17, \"direction\": \"forward\" }\n ],\n \"layers\": [\n { \"name\": \"Layer\", \"opacity\": 255, \"blendMode\": \"normal\" },\n { \"name\": \"Layer 1\", \"opacity\": 255, \"blendMode\": \"normal\" }\n ],\n \"slices\": [\n ]\n }\n}\n","id":"5fcb55fd-2350-4f87-83cb-97e92c49d21e","is_binary":false,"title":"smiley.json","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T05:48:31","upload_id":null,"shortid":"SkwnIzHLt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FfHMz-smiley.png","id":"b06db490-999c-4624-a92f-ac21de823755","is_binary":true,"title":"smiley.png","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T05:48:31","upload_id":null,"shortid":"HkeDnUMrUt","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"G66pK"},{"code":"import { useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport { MutableRefObject, useEffect, useRef } from \"react\"\nimport * as THREE from \"three\"\n\ntype AsepriteFrame = {\n frame: {\n x: number\n y: number\n w: number\n h: number\n }\n rotated: boolean\n trimmed: boolean\n spriteSourceSize: {\n x: number\n y: number\n w: number\n h: number\n }\n sourceSize: {\n w: number\n h: number\n }\n duration: number\n}\n\ntype AsepriteLayer = {\n name: string\n opacity: number\n blendMode: string\n}\n\ntype AsepriteFrameTag = { name: string; from: number; to: number; direction: \"forward\" | \"backward\" }\n\ntype AsepriteJson = {\n frames: { [name: string]: AsepriteFrame }\n meta: {\n app: string\n version: string\n image: string\n format: string\n size: {\n w: number\n h: number\n }\n frameTags: AsepriteFrameTag[]\n layers: AsepriteLayer[]\n slices: unknown[]\n }\n}\n\nfunction frameList(json: AsepriteJson): AsepriteFrame[] {\n return Object.values(json.frames)\n}\n\nfunction getAnimationFrames(json: AsepriteJson, name: string): AsepriteFrame[] {\n const tag = json.meta.frameTags.find((t) =\u003E t.name === name)\n if (!tag) return []\n\n const allFrames = frameList(json)\n return allFrames.slice(tag.from, tag.to)\n}\n\nexport function useAseprite(src: string, json: AsepriteJson, currentAnimation: string | null = null) {\n const texture: THREE.Texture = useLoader(THREE.TextureLoader, src)\n\n \u002F\u002F We'll be animating these independently, clone the texture\n const tex = texture.clone()\n tex.wrapS = tex.wrapT = THREE.RepeatWrapping\n tex.minFilter = THREE.NearestFilter\n tex.magFilter = THREE.NearestFilter\n tex.needsUpdate = true\n\n const frames: MutableRefObject\u003CAsepriteFrame[]\u003E = useRef([])\n\n const w = json.meta.size.w\n const h = json.meta.size.h\n\n const t = useRef(0)\n const index = useRef(0)\n\n useEffect(() =\u003E {\n t.current = 0\n index.current = 0\n\n if (currentAnimation) {\n frames.current = getAnimationFrames(json, currentAnimation)\n } else {\n frames.current = frameList(json)\n }\n }, [currentAnimation, texture, json])\n\n useFrame((_, delta) =\u003E {\n t.current += delta * 1000\n const f = frames.current[index.current]\n if (!f) return\n\n tex.repeat.set(f.frame.w \u002F w, f.frame.h \u002F h)\n\n if (t.current \u003E= f.duration) {\n index.current += 1\n if (index.current \u003E= frames.current.length) {\n index.current = 0\n }\n\n t.current = 0\n\n tex.offset.x = f.frame.x \u002F w\n tex.offset.y = f.frame.h \u002F h\n }\n })\n\n return tex\n}\n","id":"099cff1b-9c35-4479-95ac-902be3160664","is_binary":false,"title":"useAseprite.ts","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-10-26T05:47:00","upload_id":null,"shortid":"X29DA","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"},{"code":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002FS4sy-smiley.png","id":"a6d5a501-d487-4a60-8b9c-c2ebc50a83f3","is_binary":true,"title":"smiley.png","sha":null,"inserted_at":"2021-11-01T05:37:54","updated_at":"2021-11-01T05:37:54","upload_id":null,"shortid":"S1cEaeTIF","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyHH6jSsbt"},{"code":"{\n \"name\": \"texture-uv-slicing-animation\",\n \"version\": \"1.0.0\",\n \"description\": \"isometric 3d scene with billboard sprites\",\n \"keywords\": [],\n \"main\": \"src\u002Findex.js\",\n \"dependencies\": {\n \"@react-three\u002Fdrei\": \"7.12.6\",\n \"@react-three\u002Ffiber\": \"7.0.10\",\n \"leva\": \"0.9.14\",\n \"react\": \"17.0.2\",\n \"react-dom\": \"17.0.2\",\n \"react-scripts\": \"4.0.3\",\n \"react-spring\": \"9.2.6\",\n \"simplex-noise\": \"2.4.0\",\n \"three\": \"0.127.0\",\n \"use-control\": \"0.2.1\"\n },\n \"devDependencies\": {\n \"@types\u002Freact\": \"^17.0.19\",\n \"@types\u002Fthree\": \"^0.131.0\",\n \"typescript\": \"^4.3.5\"\n },\n \"scripts\": {\n \"start\": \"react-scripts start\",\n \"build\": \"react-scripts build\",\n \"test\": \"react-scripts test --env=jsdom\",\n \"eject\": \"react-scripts eject\"\n },\n \"browserslist\": [\n \"\u003E0.2%\",\n \"not dead\",\n \"not ie \u003C= 11\",\n \"not op_mini all\"\n ]\n}","id":"afa6973d-7313-446b-a6a4-e0b30a014876","is_binary":false,"title":"package.json","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-11-19T01:28:21","upload_id":null,"shortid":"rkiHpsBo-t","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":null},{"code":"import { OrbitControls, OrthographicCamera } from \"@react-three\u002Fdrei\"\nimport { Canvas, useFrame, useLoader } from \"@react-three\u002Ffiber\"\nimport { useControls } from \"leva\"\nimport React, { Suspense, useRef } from \"react\"\nimport * as THREE from \"three\"\nimport bard from \".\u002Fresources\u002Fsmiley.png\"\n\nconst AnimatedSprite = React.forwardRef(({ src, scale, position, frameTime = 100 }, ref) =\u003E {\n const [{ frameCount, playing, currentFrame, speed }, set] = useControls(() =\u003E ({\n speed: { value: 1, min: 0, max: 5 },\n frameCount: {\n value: 8,\n min: 0,\n max: 8,\n step: 1,\n },\n currentFrame: {\n value: 0,\n min: 0,\n max: 8,\n step: 1,\n },\n playing: true,\n }))\n\n const texture = useLoader(THREE.TextureLoader, src)\n texture.minFilter = THREE.NearestFilter\n texture.magFilter = THREE.NearestFilter\n\n texture.repeat.set(1 \u002F frameCount, 1 \u002F 1)\n\n const t = useRef(0)\n const index = useRef(0)\n\n useFrame((_, delta) =\u003E {\n if (playing) {\n t.current += delta * 1000 * speed\n\n if (t.current \u003E= frameTime) {\n index.current += 1\n if (index.current \u003E= frameCount) {\n index.current = 0\n }\n\n t.current = 0\n\n set({ currentFrame: index.current })\n texture.offset.x = index.current \u002F frameCount\n }\n } else {\n texture.offset.x = currentFrame \u002F frameCount\n }\n })\n\n return (\n \u003Csprite ref={ref} scale={scale} position={position}\u003E\n \u003CspriteMaterial transparent={true} map={texture} \u002F\u003E\n \u003C\u002Fsprite\u003E\n )\n})\n\nfunction Room() {\n return (\n \u003C\u003E\n \u003CpointLight position={[30, 0, 0]} color=\"blue\" intensity={10} \u002F\u003E\n \u003CAnimatedSprite scale={[2, 2, 2]} src={bard} \u002F\u003E\n \u003C\u002F\u003E\n )\n}\n\nexport default function App() {\n return (\n \u003CCanvas\u003E\n \u003Ccolor attach=\"background\" args={[\"black\"]} \u002F\u003E\n {\u002F* \u003CSky azimuth={1} inclination={0.1} distance={1000} \u002F\u003E *\u002F}\n \u003COrthographicCamera makeDefault position={[15, 15, 15]} zoom={80} \u002F\u003E\n \u003CambientLight intensity={0.1} \u002F\u003E\n \u003CpointLight position={[10, 10, 10]} \u002F\u003E\n \u003CSuspense fallback={null}\u003E\n \u003CRoom \u002F\u003E\n \u003C\u002FSuspense\u003E\n \u003COrbitControls minPolarAngle={Math.PI \u002F 10} maxPolarAngle={Math.PI \u002F 1.5} \u002F\u003E\n \u003C\u002FCanvas\u003E\n )\n}\n","id":"1378ab89-39c7-4089-a681-2ae6b0e688b0","is_binary":false,"title":"App.js","sha":null,"inserted_at":"2021-11-01T05:33:51","updated_at":"2021-11-23T04:38:20","upload_id":null,"shortid":"SJCBporsbK","source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c","directory_shortid":"HyxSToBs-F"}],"preview_secret":null,"owned":false,"draft":false,"original_git":null,"entry":"src\u002Findex.js","always_on":false,"screenshot_url":"https:\u002F\u002Fuploads.codesandbox.io\u002Fuploads\u002Fuser\u002Fc20fa052-9642-4723-8224-8f37e07c5102\u002Feha5-thumbnail.png","privacy":0,"ai_consent":false,"id":"t5e85","restrictions":{"free_plan_editing_restricted":false,"live_sessions_restricted":true},"custom_template":null,"external_resources":[],"v2":false,"free_plan_editing_restricted":false,"feature_flags":{"comments":false,"container_lsp":false},"user_liked":false,"author":{"id":"c20fa052-9642-4723-8224-8f37e07c5102","name":"Ben Follington","username":"bfollington","avatar_url":"https:\u002F\u002Favatars.githubusercontent.com\u002Fu\u002F5009316?v=4","personal_workspace_id":"f3667d6d-94b5-4984-83d4-6338bba3f6ae","subscription_plan":null,"subscription_since":null},"source_id":"a553bff6-920d-425f-910d-9fba7bd86a6c"};