├── .github └── workflows │ └── gh-pages.yml ├── .gitignore ├── README.md ├── genkotsu.png ├── next.config.js ├── package.json ├── public ├── .nojekyll ├── genkotsu.png ├── gif.worker.js └── gif.worker.js.map ├── src ├── assets │ └── keifont.ttf ├── components │ └── GenkotsuDrawer.tsx ├── libs │ └── draw.ts ├── pages │ ├── _app.tsx │ ├── _document.tsx │ └── index.tsx └── styles │ ├── globalStyle.ts │ └── localFonts.ts ├── tsconfig.json ├── yarn.lock └── 【源真ゴシック・源ノ角ゴシック】Apache License 2.0.txt /.github/workflows/gh-pages.yml: -------------------------------------------------------------------------------- 1 | name: GitHub Pages 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | concurrency: 13 | group: ${{ github.workflow }}-${{ github.ref }} 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v2 17 | 18 | - name: Setup Node.js 19 | uses: actions/setup-node@v2 20 | with: 21 | node-version: "18" 22 | 23 | - name: Get yarn cache 24 | id: yarn-cache 25 | run: echo "::set-output name=dir::$(yarn cache dir)" 26 | 27 | - name: Cache dependencies 28 | uses: actions/cache@v2 29 | with: 30 | path: ${{ steps.yarn-cache.outputs.dir }} 31 | key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} 32 | restore-keys: | 33 | ${{ runner.os }}-yarn- 34 | 35 | - name: Install dependencies 36 | run: yarn install --frozen-lockfile 37 | - name: Build 38 | run: yarn build 39 | - name: Export 40 | run: yarn export 41 | - name: Upload 42 | uses: actions/upload-pages-artifact@v1 43 | with: 44 | path: out 45 | 46 | deploy: 47 | needs: build 48 | permissions: 49 | pages: write 50 | id-token: write 51 | environment: 52 | name: github-pages 53 | url: ${{ steps.deployment.outputs.page_url }} 54 | runs-on: ubuntu-latest 55 | steps: 56 | - name: Deploy 57 | id: deployment 58 | uses: actions/deploy-pages@v1 59 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by https://www.toptal.com/developers/gitignore/api/macos,visualstudiocode,nextjs,react 2 | # Edit at https://www.toptal.com/developers/gitignore?templates=macos,visualstudiocode,nextjs,react 3 | 4 | ### macOS ### 5 | # General 6 | .DS_Store 7 | .AppleDouble 8 | .LSOverride 9 | 10 | # Icon must end with two \r 11 | Icon 12 | 13 | 14 | # Thumbnails 15 | ._* 16 | 17 | # Files that might appear in the root of a volume 18 | .DocumentRevisions-V100 19 | .fseventsd 20 | .Spotlight-V100 21 | .TemporaryItems 22 | .Trashes 23 | .VolumeIcon.icns 24 | .com.apple.timemachine.donotpresent 25 | 26 | # Directories potentially created on remote AFP share 27 | .AppleDB 28 | .AppleDesktop 29 | Network Trash Folder 30 | Temporary Items 31 | .apdisk 32 | 33 | ### macOS Patch ### 34 | # iCloud generated files 35 | *.icloud 36 | 37 | ### NextJS ### 38 | # dependencies 39 | /node_modules 40 | /.pnp 41 | .pnp.js 42 | 43 | # testing 44 | /coverage 45 | 46 | # next.js 47 | /.next/ 48 | /out/ 49 | 50 | # production 51 | /build 52 | 53 | # misc 54 | *.pem 55 | 56 | # debug 57 | npm-debug.log* 58 | yarn-debug.log* 59 | yarn-error.log* 60 | .pnpm-debug.log* 61 | 62 | # local env files 63 | .env*.local 64 | 65 | # vercel 66 | .vercel 67 | 68 | # typescript 69 | *.tsbuildinfo 70 | next-env.d.ts 71 | 72 | ### react ### 73 | .DS_* 74 | *.log 75 | logs 76 | **/*.backup.* 77 | **/*.back.* 78 | 79 | node_modules 80 | bower_components 81 | 82 | *.sublime* 83 | 84 | psd 85 | thumb 86 | 
sketch 87 | 88 | ### VisualStudioCode ### 89 | .vscode/* 90 | !.vscode/settings.json 91 | !.vscode/tasks.json 92 | !.vscode/launch.json 93 | !.vscode/extensions.json 94 | !.vscode/*.code-snippets 95 | 96 | # Local History for Visual Studio Code 97 | .history/ 98 | 99 | # Built Visual Studio Code Extensions 100 | *.vsix 101 | 102 | ### VisualStudioCode Patch ### 103 | # Ignore all local history of files 104 | .history 105 | .ionide 106 | 107 | # End of https://www.toptal.com/developers/gitignore/api/macos,visualstudiocode,nextjs,react -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # genkotsu 2 | 3 | ![げんこつ](./genkotsu.png) 4 | 5 | https://inaniwaudon.github.io/genkotsu/ 6 | 7 | ```bash 8 | yarn 9 | yarn run dev # launch on local env 10 | yarn run build # build 11 | ``` 12 | 13 | This site uses "[けいふぉんと](http://font.sumomo.ne.jp/font_1.html)" (keifont) to render the text. The keifont bundled with this site is distributed under the Apache License 2.0. 14 | -------------------------------------------------------------------------------- /genkotsu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/inaniwaudon/genkotsu/ef9f0a97d0c340b25ec101bca6af28f49382fea5/genkotsu.png -------------------------------------------------------------------------------- /next.config.js: -------------------------------------------------------------------------------- 1 | const path = require("path"); 2 | const CopyPlugin = require("copy-webpack-plugin"); 3 | 4 | /** @type {import('next').NextConfig} */ 5 | const nextConfig = { 6 | reactStrictMode: true, 7 | basePath: process.env.GITHUB_ACTIONS ? "/genkotsu" : "", 8 | webpack: (config) => { 9 | const nextPublicDirPath = path.resolve(__dirname, "public"); 10 | config.plugins.push( 11 | new CopyPlugin({ 12 | patterns: [ 13 | { 14 | from: "./node_modules/gif.js/dist/gif.worker.js", 15 | to: nextPublicDirPath, 16 | }, 17 | { 18 | from: "./node_modules/gif.js/dist/gif.worker.js.map", 19 | to: nextPublicDirPath, 20 | }, 21 | ], 22 | }) 23 | ); 24 | 25 | return config; 26 | }, 27 | trailingSlash: true, 28 | }; 29 | 30 | module.exports = nextConfig; 31 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "genkotsu", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "export": "next export", 9 | "start": "next start", 10 | "lint": "next lint" 11 | }, 12 | "dependencies": { 13 | "gif.js": "^0.2.0", 14 | "next": "13.3.2", 15 | "react": "18.2.0", 16 | "react-dom": "18.2.0", 17 | "styled-components": "^5.3.10" 18 | }, 19 | "devDependencies": { 20 | "@types/gif.js": "^0.2.2", 21 | "@types/node": "18.16.3", 22 | "@types/react": "18.2.0", 23 | "@types/react-dom": "18.2.1", 24 | "@types/styled-components": "^5.1.26", 25 | "copy-webpack-plugin": "^11.0.0", 26 | "eslint": "8.39.0", 27 | "eslint-config-next": "^13.3.4", 28 | "typescript": "5.0.4" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /public/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/inaniwaudon/genkotsu/ef9f0a97d0c340b25ec101bca6af28f49382fea5/public/.nojekyll
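For reference, the GitHub Pages build performed by the workflow above can be reproduced locally with the scripts defined in package.json. A minimal sketch, assuming yarn is installed; the final `npx serve` preview step uses the third-party `serve` package and is not part of this repository:

```bash
# install the pinned dependencies, mirroring the CI step
yarn install --frozen-lockfile

# build the Next.js app and export it as static files into out/
# (the "export" script runs `next export`, per package.json)
yarn build
yarn export

# note: the "/genkotsu" basePath is only applied when GITHUB_ACTIONS is set
# (see next.config.js), so a plain local export serves from the site root

# preview the exported site locally (assumes the third-party `serve` package)
npx serve out
```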
-------------------------------------------------------------------------------- /public/genkotsu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/inaniwaudon/genkotsu/ef9f0a97d0c340b25ec101bca6af28f49382fea5/public/genkotsu.png -------------------------------------------------------------------------------- /public/gif.worker.js: -------------------------------------------------------------------------------- 1 | // gif.worker.js 0.2.0 - https://github.com/jnordberg/gif.js 2 | (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o=ByteArray.pageSize)this.newPage();this.pages[this.page][this.cursor++]=val};ByteArray.prototype.writeUTFBytes=function(string){for(var l=string.length,i=0;i=0)this.dispose=disposalCode};GIFEncoder.prototype.setRepeat=function(repeat){this.repeat=repeat};GIFEncoder.prototype.setTransparent=function(color){this.transparent=color};GIFEncoder.prototype.addFrame=function(imageData){this.image=imageData;this.colorTab=this.globalPalette&&this.globalPalette.slice?this.globalPalette:null;this.getImagePixels();this.analyzePixels();if(this.globalPalette===true)this.globalPalette=this.colorTab;if(this.firstFrame){this.writeLSD();this.writePalette();if(this.repeat>=0){this.writeNetscapeExt()}}this.writeGraphicCtrlExt();this.writeImageDesc();if(!this.firstFrame&&!this.globalPalette)this.writePalette();this.writePixels();this.firstFrame=false};GIFEncoder.prototype.finish=function(){this.out.writeByte(59)};GIFEncoder.prototype.setQuality=function(quality){if(quality<1)quality=1;this.sample=quality};GIFEncoder.prototype.setDither=function(dither){if(dither===true)dither="FloydSteinberg";this.dither=dither};GIFEncoder.prototype.setGlobalPalette=function(palette){this.globalPalette=palette};GIFEncoder.prototype.getGlobalPalette=function(){return this.globalPalette&&this.globalPalette.slice&&this.globalPalette.slice(0)||this.globalPalette};GIFEncoder.prototype.writeHeader=function(){this.out.writeUTFBytes("GIF89a")};GIFEncoder.prototype.analyzePixels=function(){if(!this.colorTab){this.neuQuant=new NeuQuant(this.pixels,this.sample);this.neuQuant.buildColormap();this.colorTab=this.neuQuant.getColormap()}if(this.dither){this.ditherPixels(this.dither.replace("-serpentine",""),this.dither.match(/-serpentine/)!==null)}else{this.indexPixels()}this.pixels=null;this.colorDepth=8;this.palSize=7;if(this.transparent!==null){this.transIndex=this.findClosest(this.transparent,true)}};GIFEncoder.prototype.indexPixels=function(imgq){var nPix=this.pixels.length/3;this.indexedPixels=new Uint8Array(nPix);var k=0;for(var j=0;j=0&&x1+x=0&&y1+y>16,(c&65280)>>8,c&255,used)};GIFEncoder.prototype.findClosestRGB=function(r,g,b,used){if(this.colorTab===null)return-1;if(this.neuQuant&&!used){return this.neuQuant.lookupRGB(r,g,b)}var c=b|g<<8|r<<16;var minpos=0;var dmin=256*256*256;var len=this.colorTab.length;for(var 
i=0,index=0;i=0){disp=dispose&7}disp<<=2;this.out.writeByte(0|disp|0|transp);this.writeShort(this.delay);this.out.writeByte(this.transIndex);this.out.writeByte(0)};GIFEncoder.prototype.writeImageDesc=function(){this.out.writeByte(44);this.writeShort(0);this.writeShort(0);this.writeShort(this.width);this.writeShort(this.height);if(this.firstFrame||this.globalPalette){this.out.writeByte(0)}else{this.out.writeByte(128|0|0|0|this.palSize)}};GIFEncoder.prototype.writeLSD=function(){this.writeShort(this.width);this.writeShort(this.height);this.out.writeByte(128|112|0|this.palSize);this.out.writeByte(0);this.out.writeByte(0)};GIFEncoder.prototype.writeNetscapeExt=function(){this.out.writeByte(33);this.out.writeByte(255);this.out.writeByte(11);this.out.writeUTFBytes("NETSCAPE2.0");this.out.writeByte(3);this.out.writeByte(1);this.writeShort(this.repeat);this.out.writeByte(0)};GIFEncoder.prototype.writePalette=function(){this.out.writeBytes(this.colorTab);var n=3*256-this.colorTab.length;for(var i=0;i>8&255)};GIFEncoder.prototype.writePixels=function(){var enc=new LZWEncoder(this.width,this.height,this.indexedPixels,this.colorDepth);enc.encode(this.out)};GIFEncoder.prototype.stream=function(){return this.out};module.exports=GIFEncoder},{"./LZWEncoder.js":2,"./TypedNeuQuant.js":3}],2:[function(require,module,exports){var EOF=-1;var BITS=12;var HSIZE=5003;var masks=[0,1,3,7,15,31,63,127,255,511,1023,2047,4095,8191,16383,32767,65535];function LZWEncoder(width,height,pixels,colorDepth){var initCodeSize=Math.max(2,colorDepth);var accum=new Uint8Array(256);var htab=new Int32Array(HSIZE);var codetab=new Int32Array(HSIZE);var cur_accum,cur_bits=0;var a_count;var free_ent=0;var maxcode;var clear_flg=false;var g_init_bits,ClearCode,EOFCode;function char_out(c,outs){accum[a_count++]=c;if(a_count>=254)flush_char(outs)}function cl_block(outs){cl_hash(HSIZE);free_ent=ClearCode+2;clear_flg=true;output(ClearCode,outs)}function cl_hash(hsize){for(var i=0;i=0){disp=hsize_reg-i;if(i===0)disp=1;do{if((i-=disp)<0)i+=hsize_reg;if(htab[i]===fcode){ent=codetab[i];continue outer_loop}}while(htab[i]>=0)}output(ent,outs);ent=c;if(free_ent<1<0){outs.writeByte(a_count);outs.writeBytes(accum,0,a_count);a_count=0}}function MAXCODE(n_bits){return(1<0)cur_accum|=code<=8){char_out(cur_accum&255,outs);cur_accum>>=8;cur_bits-=8}if(free_ent>maxcode||clear_flg){if(clear_flg){maxcode=MAXCODE(n_bits=g_init_bits);clear_flg=false}else{++n_bits;if(n_bits==BITS)maxcode=1<0){char_out(cur_accum&255,outs);cur_accum>>=8;cur_bits-=8}flush_char(outs)}}this.encode=encode}module.exports=LZWEncoder},{}],3:[function(require,module,exports){var ncycles=100;var netsize=256;var maxnetpos=netsize-1;var netbiasshift=4;var intbiasshift=16;var intbias=1<>betashift;var betagamma=intbias<>3;var radiusbiasshift=6;var radiusbias=1<>3);var i,v;for(i=0;i>=netbiasshift;network[i][1]>>=netbiasshift;network[i][2]>>=netbiasshift;network[i][3]=i}}function altersingle(alpha,i,b,g,r){network[i][0]-=alpha*(network[i][0]-b)/initalpha;network[i][1]-=alpha*(network[i][1]-g)/initalpha;network[i][2]-=alpha*(network[i][2]-r)/initalpha}function alterneigh(radius,i,b,g,r){var lo=Math.abs(i-radius);var hi=Math.min(i+radius,netsize);var j=i+1;var k=i-1;var m=1;var p,a;while(jlo){a=radpower[m++];if(jlo){p=network[k--];p[0]-=a*(p[0]-b)/alpharadbias;p[1]-=a*(p[1]-g)/alpharadbias;p[2]-=a*(p[2]-r)/alpharadbias}}}function contest(b,g,r){var bestd=~(1<<31);var bestbiasd=bestd;var bestpos=-1;var bestbiaspos=bestpos;var 
i,n,dist,biasdist,betafreq;for(i=0;i>intbiasshift-netbiasshift);if(biasdist>betashift;freq[i]-=betafreq;bias[i]+=betafreq<>1;for(j=previouscol+1;j>1;for(j=previouscol+1;j<256;j++)netindex[j]=maxnetpos}function inxsearch(b,g,r){var a,p,dist;var bestd=1e3;var best=-1;var i=netindex[g];var j=i-1;while(i=0){if(i=bestd)i=netsize;else{i++;if(dist<0)dist=-dist;a=p[0]-b;if(a<0)a=-a;dist+=a;if(dist=0){p=network[j];dist=g-p[1];if(dist>=bestd)j=-1;else{j--;if(dist<0)dist=-dist;a=p[0]-b;if(a<0)a=-a;dist+=a;if(dist>radiusbiasshift;if(rad<=1)rad=0;for(i=0;i=lengthcount)pix-=lengthcount;i++;if(delta===0)delta=1;if(i%delta===0){alpha-=alpha/alphadec;radius-=radius/radiusdec;rad=radius>>radiusbiasshift;if(rad<=1)rad=0;for(j=0;j= ByteArray.pageSize) this.newPage();\n this.pages[this.page][this.cursor++] = val;\n};\n\nByteArray.prototype.writeUTFBytes = function(string) {\n for (var l = string.length, i = 0; i < l; i++)\n this.writeByte(string.charCodeAt(i));\n};\n\nByteArray.prototype.writeBytes = function(array, offset, length) {\n for (var l = length || array.length, i = offset || 0; i < l; i++)\n this.writeByte(array[i]);\n};\n\nfunction GIFEncoder(width, height) {\n // image size\n this.width = ~~width;\n this.height = ~~height;\n\n // transparent color if given\n this.transparent = null;\n\n // transparent index in color table\n this.transIndex = 0;\n\n // -1 = no repeat, 0 = forever. anything else is repeat count\n this.repeat = -1;\n\n // frame delay (hundredths)\n this.delay = 0;\n\n this.image = null; // current frame\n this.pixels = null; // BGR byte array from frame\n this.indexedPixels = null; // converted frame indexed to palette\n this.colorDepth = null; // number of bit planes\n this.colorTab = null; // RGB palette\n this.neuQuant = null; // NeuQuant instance that was used to generate this.colorTab.\n this.usedEntry = new Array(); // active palette entries\n this.palSize = 7; // color table size (bits-1)\n this.dispose = -1; // disposal code (-1 = use default)\n this.firstFrame = true;\n this.sample = 10; // default sample interval for quantizer\n this.dither = false; // default dithering\n this.globalPalette = false;\n\n this.out = new ByteArray();\n}\n\n/*\n Sets the delay time between each frame, or changes it for subsequent frames\n (applies to last frame added)\n*/\nGIFEncoder.prototype.setDelay = function(milliseconds) {\n this.delay = Math.round(milliseconds / 10);\n};\n\n/*\n Sets frame rate in frames per second.\n*/\nGIFEncoder.prototype.setFrameRate = function(fps) {\n this.delay = Math.round(100 / fps);\n};\n\n/*\n Sets the GIF frame disposal code for the last added frame and any\n subsequent frames.\n\n Default is 0 if no transparent color has been set, otherwise 2.\n*/\nGIFEncoder.prototype.setDispose = function(disposalCode) {\n if (disposalCode >= 0) this.dispose = disposalCode;\n};\n\n/*\n Sets the number of times the set of GIF frames should be played.\n\n -1 = play once\n 0 = repeat indefinitely\n\n Default is -1\n\n Must be invoked before the first image is added\n*/\n\nGIFEncoder.prototype.setRepeat = function(repeat) {\n this.repeat = repeat;\n};\n\n/*\n Sets the transparent color for the last added frame and any subsequent\n frames. Since all colors are subject to modification in the quantization\n process, the color in the final palette for each frame closest to the given\n color becomes the transparent color for that frame. 
May be set to null to\n indicate no transparent color.\n*/\nGIFEncoder.prototype.setTransparent = function(color) {\n this.transparent = color;\n};\n\n/*\n Adds next GIF frame. The frame is not written immediately, but is\n actually deferred until the next frame is received so that timing\n data can be inserted. Invoking finish() flushes all frames.\n*/\nGIFEncoder.prototype.addFrame = function(imageData) {\n this.image = imageData;\n\n this.colorTab = this.globalPalette && this.globalPalette.slice ? this.globalPalette : null;\n\n this.getImagePixels(); // convert to correct format if necessary\n this.analyzePixels(); // build color table & map pixels\n\n if (this.globalPalette === true) this.globalPalette = this.colorTab;\n\n if (this.firstFrame) {\n this.writeLSD(); // logical screen descriptior\n this.writePalette(); // global color table\n if (this.repeat >= 0) {\n // use NS app extension to indicate reps\n this.writeNetscapeExt();\n }\n }\n\n this.writeGraphicCtrlExt(); // write graphic control extension\n this.writeImageDesc(); // image descriptor\n if (!this.firstFrame && !this.globalPalette) this.writePalette(); // local color table\n this.writePixels(); // encode and write pixel data\n\n this.firstFrame = false;\n};\n\n/*\n Adds final trailer to the GIF stream, if you don't call the finish method\n the GIF stream will not be valid.\n*/\nGIFEncoder.prototype.finish = function() {\n this.out.writeByte(0x3b); // gif trailer\n};\n\n/*\n Sets quality of color quantization (conversion of images to the maximum 256\n colors allowed by the GIF specification). Lower values (minimum = 1)\n produce better colors, but slow processing significantly. 10 is the\n default, and produces good color mapping at reasonable speeds. Values\n greater than 20 do not yield significant improvements in speed.\n*/\nGIFEncoder.prototype.setQuality = function(quality) {\n if (quality < 1) quality = 1;\n this.sample = quality;\n};\n\n/*\n Sets dithering method. 
Available are:\n - FALSE no dithering\n - TRUE or FloydSteinberg\n - FalseFloydSteinberg\n - Stucki\n - Atkinson\n You can add '-serpentine' to use serpentine scanning\n*/\nGIFEncoder.prototype.setDither = function(dither) {\n if (dither === true) dither = 'FloydSteinberg';\n this.dither = dither;\n};\n\n/*\n Sets global palette for all frames.\n You can provide TRUE to create global palette from first picture.\n Or an array of r,g,b,r,g,b,...\n*/\nGIFEncoder.prototype.setGlobalPalette = function(palette) {\n this.globalPalette = palette;\n};\n\n/*\n Returns global palette used for all frames.\n If setGlobalPalette(true) was used, then this function will return\n calculated palette after the first frame is added.\n*/\nGIFEncoder.prototype.getGlobalPalette = function() {\n return (this.globalPalette && this.globalPalette.slice && this.globalPalette.slice(0)) || this.globalPalette;\n};\n\n/*\n Writes GIF file header\n*/\nGIFEncoder.prototype.writeHeader = function() {\n this.out.writeUTFBytes(\"GIF89a\");\n};\n\n/*\n Analyzes current frame colors and creates color map.\n*/\nGIFEncoder.prototype.analyzePixels = function() {\n if (!this.colorTab) {\n this.neuQuant = new NeuQuant(this.pixels, this.sample);\n this.neuQuant.buildColormap(); // create reduced palette\n this.colorTab = this.neuQuant.getColormap();\n }\n\n // map image pixels to new palette\n if (this.dither) {\n this.ditherPixels(this.dither.replace('-serpentine', ''), this.dither.match(/-serpentine/) !== null);\n } else {\n this.indexPixels();\n }\n\n this.pixels = null;\n this.colorDepth = 8;\n this.palSize = 7;\n\n // get closest match to transparent color if specified\n if (this.transparent !== null) {\n this.transIndex = this.findClosest(this.transparent, true);\n }\n};\n\n/*\n Index pixels, without dithering\n*/\nGIFEncoder.prototype.indexPixels = function(imgq) {\n var nPix = this.pixels.length / 3;\n this.indexedPixels = new Uint8Array(nPix);\n var k = 0;\n for (var j = 0; j < nPix; j++) {\n var index = this.findClosestRGB(\n this.pixels[k++] & 0xff,\n this.pixels[k++] & 0xff,\n this.pixels[k++] & 0xff\n );\n this.usedEntry[index] = true;\n this.indexedPixels[j] = index;\n }\n};\n\n/*\n Taken from http://jsbin.com/iXofIji/2/edit by PAEz\n*/\nGIFEncoder.prototype.ditherPixels = function(kernel, serpentine) {\n var kernels = {\n FalseFloydSteinberg: [\n [3 / 8, 1, 0],\n [3 / 8, 0, 1],\n [2 / 8, 1, 1]\n ],\n FloydSteinberg: [\n [7 / 16, 1, 0],\n [3 / 16, -1, 1],\n [5 / 16, 0, 1],\n [1 / 16, 1, 1]\n ],\n Stucki: [\n [8 / 42, 1, 0],\n [4 / 42, 2, 0],\n [2 / 42, -2, 1],\n [4 / 42, -1, 1],\n [8 / 42, 0, 1],\n [4 / 42, 1, 1],\n [2 / 42, 2, 1],\n [1 / 42, -2, 2],\n [2 / 42, -1, 2],\n [4 / 42, 0, 2],\n [2 / 42, 1, 2],\n [1 / 42, 2, 2]\n ],\n Atkinson: [\n [1 / 8, 1, 0],\n [1 / 8, 2, 0],\n [1 / 8, -1, 1],\n [1 / 8, 0, 1],\n [1 / 8, 1, 1],\n [1 / 8, 0, 2]\n ]\n };\n\n if (!kernel || !kernels[kernel]) {\n throw 'Unknown dithering kernel: ' + kernel;\n }\n\n var ds = kernels[kernel];\n var index = 0,\n height = this.height,\n width = this.width,\n data = this.pixels;\n var direction = serpentine ? -1 : 1;\n\n this.indexedPixels = new Uint8Array(this.pixels.length / 3);\n\n for (var y = 0; y < height; y++) {\n\n if (serpentine) direction = direction * -1;\n\n for (var x = (direction == 1 ? 0 : width - 1), xend = (direction == 1 ? 
width : 0); x !== xend; x += direction) {\n\n index = (y * width) + x;\n // Get original colour\n var idx = index * 3;\n var r1 = data[idx];\n var g1 = data[idx + 1];\n var b1 = data[idx + 2];\n\n // Get converted colour\n idx = this.findClosestRGB(r1, g1, b1);\n this.usedEntry[idx] = true;\n this.indexedPixels[index] = idx;\n idx *= 3;\n var r2 = this.colorTab[idx];\n var g2 = this.colorTab[idx + 1];\n var b2 = this.colorTab[idx + 2];\n\n var er = r1 - r2;\n var eg = g1 - g2;\n var eb = b1 - b2;\n\n for (var i = (direction == 1 ? 0: ds.length - 1), end = (direction == 1 ? ds.length : 0); i !== end; i += direction) {\n var x1 = ds[i][1]; // *direction; // Should this by timesd by direction?..to make the kernel go in the opposite direction....got no idea....\n var y1 = ds[i][2];\n if (x1 + x >= 0 && x1 + x < width && y1 + y >= 0 && y1 + y < height) {\n var d = ds[i][0];\n idx = index + x1 + (y1 * width);\n idx *= 3;\n\n data[idx] = Math.max(0, Math.min(255, data[idx] + er * d));\n data[idx + 1] = Math.max(0, Math.min(255, data[idx + 1] + eg * d));\n data[idx + 2] = Math.max(0, Math.min(255, data[idx + 2] + eb * d));\n }\n }\n }\n }\n};\n\n/*\n Returns index of palette color closest to c\n*/\nGIFEncoder.prototype.findClosest = function(c, used) {\n return this.findClosestRGB((c & 0xFF0000) >> 16, (c & 0x00FF00) >> 8, (c & 0x0000FF), used);\n};\n\nGIFEncoder.prototype.findClosestRGB = function(r, g, b, used) {\n if (this.colorTab === null) return -1;\n\n if (this.neuQuant && !used) {\n return this.neuQuant.lookupRGB(r, g, b);\n }\n \n var c = b | (g << 8) | (r << 16);\n\n var minpos = 0;\n var dmin = 256 * 256 * 256;\n var len = this.colorTab.length;\n\n for (var i = 0, index = 0; i < len; index++) {\n var dr = r - (this.colorTab[i++] & 0xff);\n var dg = g - (this.colorTab[i++] & 0xff);\n var db = b - (this.colorTab[i++] & 0xff);\n var d = dr * dr + dg * dg + db * db;\n if ((!used || this.usedEntry[index]) && (d < dmin)) {\n dmin = d;\n minpos = index;\n }\n }\n\n return minpos;\n};\n\n/*\n Extracts image pixels into byte array pixels\n (removes alphachannel from canvas imagedata)\n*/\nGIFEncoder.prototype.getImagePixels = function() {\n var w = this.width;\n var h = this.height;\n this.pixels = new Uint8Array(w * h * 3);\n\n var data = this.image;\n var srcPos = 0;\n var count = 0;\n\n for (var i = 0; i < h; i++) {\n for (var j = 0; j < w; j++) {\n this.pixels[count++] = data[srcPos++];\n this.pixels[count++] = data[srcPos++];\n this.pixels[count++] = data[srcPos++];\n srcPos++;\n }\n }\n};\n\n/*\n Writes Graphic Control Extension\n*/\nGIFEncoder.prototype.writeGraphicCtrlExt = function() {\n this.out.writeByte(0x21); // extension introducer\n this.out.writeByte(0xf9); // GCE label\n this.out.writeByte(4); // data block size\n\n var transp, disp;\n if (this.transparent === null) {\n transp = 0;\n disp = 0; // dispose = no action\n } else {\n transp = 1;\n disp = 2; // force clear if using transparent color\n }\n\n if (this.dispose >= 0) {\n disp = dispose & 7; // user override\n }\n disp <<= 2;\n\n // packed fields\n this.out.writeByte(\n 0 | // 1:3 reserved\n disp | // 4:6 disposal\n 0 | // 7 user input - 0 = none\n transp // 8 transparency flag\n );\n\n this.writeShort(this.delay); // delay x 1/100 sec\n this.out.writeByte(this.transIndex); // transparent color index\n this.out.writeByte(0); // block terminator\n};\n\n/*\n Writes Image Descriptor\n*/\nGIFEncoder.prototype.writeImageDesc = function() {\n this.out.writeByte(0x2c); // image separator\n this.writeShort(0); // image position 
x,y = 0,0\n this.writeShort(0);\n this.writeShort(this.width); // image size\n this.writeShort(this.height);\n\n // packed fields\n if (this.firstFrame || this.globalPalette) {\n // no LCT - GCT is used for first (or only) frame\n this.out.writeByte(0);\n } else {\n // specify normal LCT\n this.out.writeByte(\n 0x80 | // 1 local color table 1=yes\n 0 | // 2 interlace - 0=no\n 0 | // 3 sorted - 0=no\n 0 | // 4-5 reserved\n this.palSize // 6-8 size of color table\n );\n }\n};\n\n/*\n Writes Logical Screen Descriptor\n*/\nGIFEncoder.prototype.writeLSD = function() {\n // logical screen size\n this.writeShort(this.width);\n this.writeShort(this.height);\n\n // packed fields\n this.out.writeByte(\n 0x80 | // 1 : global color table flag = 1 (gct used)\n 0x70 | // 2-4 : color resolution = 7\n 0x00 | // 5 : gct sort flag = 0\n this.palSize // 6-8 : gct size\n );\n\n this.out.writeByte(0); // background color index\n this.out.writeByte(0); // pixel aspect ratio - assume 1:1\n};\n\n/*\n Writes Netscape application extension to define repeat count.\n*/\nGIFEncoder.prototype.writeNetscapeExt = function() {\n this.out.writeByte(0x21); // extension introducer\n this.out.writeByte(0xff); // app extension label\n this.out.writeByte(11); // block size\n this.out.writeUTFBytes('NETSCAPE2.0'); // app id + auth code\n this.out.writeByte(3); // sub-block size\n this.out.writeByte(1); // loop sub-block id\n this.writeShort(this.repeat); // loop count (extra iterations, 0=repeat forever)\n this.out.writeByte(0); // block terminator\n};\n\n/*\n Writes color table\n*/\nGIFEncoder.prototype.writePalette = function() {\n this.out.writeBytes(this.colorTab);\n var n = (3 * 256) - this.colorTab.length;\n for (var i = 0; i < n; i++)\n this.out.writeByte(0);\n};\n\nGIFEncoder.prototype.writeShort = function(pValue) {\n this.out.writeByte(pValue & 0xFF);\n this.out.writeByte((pValue >> 8) & 0xFF);\n};\n\n/*\n Encodes and writes pixel data\n*/\nGIFEncoder.prototype.writePixels = function() {\n var enc = new LZWEncoder(this.width, this.height, this.indexedPixels, this.colorDepth);\n enc.encode(this.out);\n};\n\n/*\n Retrieves the GIF stream\n*/\nGIFEncoder.prototype.stream = function() {\n return this.out;\n};\n\nmodule.exports = GIFEncoder;\n","/*\n LZWEncoder.js\n\n Authors\n Kevin Weiner (original Java version - kweiner@fmsware.com)\n Thibault Imbert (AS3 version - bytearray.org)\n Johan Nordberg (JS version - code@johan-nordberg.com)\n\n Acknowledgements\n GIFCOMPR.C - GIF Image compression routines\n Lempel-Ziv compression based on 'compress'. GIF modifications by\n David Rowley (mgardi@watdcsu.waterloo.edu)\n GIF Image compression - modified 'compress'\n Based on: compress.c - File compression ala IEEE Computer, June 1984.\n By Authors: Spencer W. Thomas (decvax!harpo!utah-cs!utah-gr!thomas)\n Jim McKie (decvax!mcvax!jim)\n Steve Davies (decvax!vax135!petsd!peora!srd)\n Ken Turkowski (decvax!decwrl!turtlevax!ken)\n James A. 
Woods (decvax!ihnp4!ames!jaw)\n Joe Orost (decvax!vax135!petsd!joe)\n*/\n\nvar EOF = -1;\nvar BITS = 12;\nvar HSIZE = 5003; // 80% occupancy\nvar masks = [0x0000, 0x0001, 0x0003, 0x0007, 0x000F, 0x001F,\n 0x003F, 0x007F, 0x00FF, 0x01FF, 0x03FF, 0x07FF,\n 0x0FFF, 0x1FFF, 0x3FFF, 0x7FFF, 0xFFFF];\n\nfunction LZWEncoder(width, height, pixels, colorDepth) {\n var initCodeSize = Math.max(2, colorDepth);\n\n var accum = new Uint8Array(256);\n var htab = new Int32Array(HSIZE);\n var codetab = new Int32Array(HSIZE);\n\n var cur_accum, cur_bits = 0;\n var a_count;\n var free_ent = 0; // first unused entry\n var maxcode;\n\n // block compression parameters -- after all codes are used up,\n // and compression rate changes, start over.\n var clear_flg = false;\n\n // Algorithm: use open addressing double hashing (no chaining) on the\n // prefix code / next character combination. We do a variant of Knuth's\n // algorithm D (vol. 3, sec. 6.4) along with G. Knott's relatively-prime\n // secondary probe. Here, the modular division first probe is gives way\n // to a faster exclusive-or manipulation. Also do block compression with\n // an adaptive reset, whereby the code table is cleared when the compression\n // ratio decreases, but after the table fills. The variable-length output\n // codes are re-sized at this point, and a special CLEAR code is generated\n // for the decompressor. Late addition: construct the table according to\n // file size for noticeable speed improvement on small files. Please direct\n // questions about this implementation to ames!jaw.\n var g_init_bits, ClearCode, EOFCode;\n\n // Add a character to the end of the current packet, and if it is 254\n // characters, flush the packet to disk.\n function char_out(c, outs) {\n accum[a_count++] = c;\n if (a_count >= 254) flush_char(outs);\n }\n\n // Clear out the hash table\n // table clear for block compress\n function cl_block(outs) {\n cl_hash(HSIZE);\n free_ent = ClearCode + 2;\n clear_flg = true;\n output(ClearCode, outs);\n }\n\n // Reset code table\n function cl_hash(hsize) {\n for (var i = 0; i < hsize; ++i) htab[i] = -1;\n }\n\n function compress(init_bits, outs) {\n var fcode, c, i, ent, disp, hsize_reg, hshift;\n\n // Set up the globals: g_init_bits - initial number of bits\n g_init_bits = init_bits;\n\n // Set up the necessary values\n clear_flg = false;\n n_bits = g_init_bits;\n maxcode = MAXCODE(n_bits);\n\n ClearCode = 1 << (init_bits - 1);\n EOFCode = ClearCode + 1;\n free_ent = ClearCode + 2;\n\n a_count = 0; // clear packet\n\n ent = nextPixel();\n\n hshift = 0;\n for (fcode = HSIZE; fcode < 65536; fcode *= 2) ++hshift;\n hshift = 8 - hshift; // set hash code range bound\n hsize_reg = HSIZE;\n cl_hash(hsize_reg); // clear hash table\n\n output(ClearCode, outs);\n\n outer_loop: while ((c = nextPixel()) != EOF) {\n fcode = (c << BITS) + ent;\n i = (c << hshift) ^ ent; // xor hashing\n if (htab[i] === fcode) {\n ent = codetab[i];\n continue;\n } else if (htab[i] >= 0) { // non-empty slot\n disp = hsize_reg - i; // secondary hash (after G. 
Knott)\n if (i === 0) disp = 1;\n do {\n if ((i -= disp) < 0) i += hsize_reg;\n if (htab[i] === fcode) {\n ent = codetab[i];\n continue outer_loop;\n }\n } while (htab[i] >= 0);\n }\n output(ent, outs);\n ent = c;\n if (free_ent < 1 << BITS) {\n codetab[i] = free_ent++; // code -> hashtable\n htab[i] = fcode;\n } else {\n cl_block(outs);\n }\n }\n\n // Put out the final code.\n output(ent, outs);\n output(EOFCode, outs);\n }\n\n function encode(outs) {\n outs.writeByte(initCodeSize); // write \"initial code size\" byte\n remaining = width * height; // reset navigation variables\n curPixel = 0;\n compress(initCodeSize + 1, outs); // compress and write the pixel data\n outs.writeByte(0); // write block terminator\n }\n\n // Flush the packet to disk, and reset the accumulator\n function flush_char(outs) {\n if (a_count > 0) {\n outs.writeByte(a_count);\n outs.writeBytes(accum, 0, a_count);\n a_count = 0;\n }\n }\n\n function MAXCODE(n_bits) {\n return (1 << n_bits) - 1;\n }\n\n // Return the next pixel from the image\n function nextPixel() {\n if (remaining === 0) return EOF;\n --remaining;\n var pix = pixels[curPixel++];\n return pix & 0xff;\n }\n\n function output(code, outs) {\n cur_accum &= masks[cur_bits];\n\n if (cur_bits > 0) cur_accum |= (code << cur_bits);\n else cur_accum = code;\n\n cur_bits += n_bits;\n\n while (cur_bits >= 8) {\n char_out((cur_accum & 0xff), outs);\n cur_accum >>= 8;\n cur_bits -= 8;\n }\n\n // If the next entry is going to be too big for the code size,\n // then increase it, if possible.\n if (free_ent > maxcode || clear_flg) {\n if (clear_flg) {\n maxcode = MAXCODE(n_bits = g_init_bits);\n clear_flg = false;\n } else {\n ++n_bits;\n if (n_bits == BITS) maxcode = 1 << BITS;\n else maxcode = MAXCODE(n_bits);\n }\n }\n\n if (code == EOFCode) {\n // At EOF, write the rest of the buffer.\n while (cur_bits > 0) {\n char_out((cur_accum & 0xff), outs);\n cur_accum >>= 8;\n cur_bits -= 8;\n }\n flush_char(outs);\n }\n }\n\n this.encode = encode;\n}\n\nmodule.exports = LZWEncoder;\n","/* NeuQuant Neural-Net Quantization Algorithm\n * ------------------------------------------\n *\n * Copyright (c) 1994 Anthony Dekker\n *\n * NEUQUANT Neural-Net quantization algorithm by Anthony Dekker, 1994.\n * See \"Kohonen neural networks for optimal colour quantization\"\n * in \"Network: Computation in Neural Systems\" Vol. 
5 (1994) pp 351-367.\n * for a discussion of the algorithm.\n * See also http://members.ozemail.com.au/~dekker/NEUQUANT.HTML\n *\n * Any party obtaining a copy of these files from the author, directly or\n * indirectly, is granted, free of charge, a full and unrestricted irrevocable,\n * world-wide, paid up, royalty-free, nonexclusive right and license to deal\n * in this software and documentation files (the \"Software\"), including without\n * limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,\n * and/or sell copies of the Software, and to permit persons who receive\n * copies from any such party to do so, with the only requirement being\n * that this copyright notice remain intact.\n *\n * (JavaScript port 2012 by Johan Nordberg)\n */\n\nvar ncycles = 100; // number of learning cycles\nvar netsize = 256; // number of colors used\nvar maxnetpos = netsize - 1;\n\n// defs for freq and bias\nvar netbiasshift = 4; // bias for colour values\nvar intbiasshift = 16; // bias for fractions\nvar intbias = (1 << intbiasshift);\nvar gammashift = 10;\nvar gamma = (1 << gammashift);\nvar betashift = 10;\nvar beta = (intbias >> betashift); /* beta = 1/1024 */\nvar betagamma = (intbias << (gammashift - betashift));\n\n// defs for decreasing radius factor\nvar initrad = (netsize >> 3); // for 256 cols, radius starts\nvar radiusbiasshift = 6; // at 32.0 biased by 6 bits\nvar radiusbias = (1 << radiusbiasshift);\nvar initradius = (initrad * radiusbias); //and decreases by a\nvar radiusdec = 30; // factor of 1/30 each cycle\n\n// defs for decreasing alpha factor\nvar alphabiasshift = 10; // alpha starts at 1.0\nvar initalpha = (1 << alphabiasshift);\nvar alphadec; // biased by 10 bits\n\n/* radbias and alpharadbias used for radpower calculation */\nvar radbiasshift = 8;\nvar radbias = (1 << radbiasshift);\nvar alpharadbshift = (alphabiasshift + radbiasshift);\nvar alpharadbias = (1 << alpharadbshift);\n\n// four primes near 500 - assume no image has a length so large that it is\n// divisible by all four primes\nvar prime1 = 499;\nvar prime2 = 491;\nvar prime3 = 487;\nvar prime4 = 503;\nvar minpicturebytes = (3 * prime4);\n\n/*\n Constructor: NeuQuant\n\n Arguments:\n\n pixels - array of pixels in RGB format\n samplefac - sampling factor 1 to 30 where lower is better quality\n\n >\n > pixels = [r, g, b, r, g, b, r, g, b, ..]\n >\n*/\nfunction NeuQuant(pixels, samplefac) {\n var network; // int[netsize][4]\n var netindex; // for network lookup - really 256\n\n // bias and freq arrays for learning\n var bias;\n var freq;\n var radpower;\n\n /*\n Private Method: init\n\n sets up arrays\n */\n function init() {\n network = [];\n netindex = new Int32Array(256);\n bias = new Int32Array(netsize);\n freq = new Int32Array(netsize);\n radpower = new Int32Array(netsize >> 3);\n\n var i, v;\n for (i = 0; i < netsize; i++) {\n v = (i << (netbiasshift + 8)) / netsize;\n network[i] = new Float64Array([v, v, v, 0]);\n //network[i] = [v, v, v, 0]\n freq[i] = intbias / netsize;\n bias[i] = 0;\n }\n }\n\n /*\n Private Method: unbiasnet\n\n unbiases network to give byte values 0..255 and record position i to prepare for sort\n */\n function unbiasnet() {\n for (var i = 0; i < netsize; i++) {\n network[i][0] >>= netbiasshift;\n network[i][1] >>= netbiasshift;\n network[i][2] >>= netbiasshift;\n network[i][3] = i; // record color number\n }\n }\n\n /*\n Private Method: altersingle\n\n moves neuron *i* towards biased (b,g,r) by factor *alpha*\n */\n function altersingle(alpha, i, b, g, r) {\n 
network[i][0] -= (alpha * (network[i][0] - b)) / initalpha;\n network[i][1] -= (alpha * (network[i][1] - g)) / initalpha;\n network[i][2] -= (alpha * (network[i][2] - r)) / initalpha;\n }\n\n /*\n Private Method: alterneigh\n\n moves neurons in *radius* around index *i* towards biased (b,g,r) by factor *alpha*\n */\n function alterneigh(radius, i, b, g, r) {\n var lo = Math.abs(i - radius);\n var hi = Math.min(i + radius, netsize);\n\n var j = i + 1;\n var k = i - 1;\n var m = 1;\n\n var p, a;\n while ((j < hi) || (k > lo)) {\n a = radpower[m++];\n\n if (j < hi) {\n p = network[j++];\n p[0] -= (a * (p[0] - b)) / alpharadbias;\n p[1] -= (a * (p[1] - g)) / alpharadbias;\n p[2] -= (a * (p[2] - r)) / alpharadbias;\n }\n\n if (k > lo) {\n p = network[k--];\n p[0] -= (a * (p[0] - b)) / alpharadbias;\n p[1] -= (a * (p[1] - g)) / alpharadbias;\n p[2] -= (a * (p[2] - r)) / alpharadbias;\n }\n }\n }\n\n /*\n Private Method: contest\n\n searches for biased BGR values\n */\n function contest(b, g, r) {\n /*\n finds closest neuron (min dist) and updates freq\n finds best neuron (min dist-bias) and returns position\n for frequently chosen neurons, freq[i] is high and bias[i] is negative\n bias[i] = gamma * ((1 / netsize) - freq[i])\n */\n\n var bestd = ~(1 << 31);\n var bestbiasd = bestd;\n var bestpos = -1;\n var bestbiaspos = bestpos;\n\n var i, n, dist, biasdist, betafreq;\n for (i = 0; i < netsize; i++) {\n n = network[i];\n\n dist = Math.abs(n[0] - b) + Math.abs(n[1] - g) + Math.abs(n[2] - r);\n if (dist < bestd) {\n bestd = dist;\n bestpos = i;\n }\n\n biasdist = dist - ((bias[i]) >> (intbiasshift - netbiasshift));\n if (biasdist < bestbiasd) {\n bestbiasd = biasdist;\n bestbiaspos = i;\n }\n\n betafreq = (freq[i] >> betashift);\n freq[i] -= betafreq;\n bias[i] += (betafreq << gammashift);\n }\n\n freq[bestpos] += beta;\n bias[bestpos] -= betagamma;\n\n return bestbiaspos;\n }\n\n /*\n Private Method: inxbuild\n\n sorts network and builds netindex[0..255]\n */\n function inxbuild() {\n var i, j, p, q, smallpos, smallval, previouscol = 0, startpos = 0;\n for (i = 0; i < netsize; i++) {\n p = network[i];\n smallpos = i;\n smallval = p[1]; // index on g\n // find smallest in i..netsize-1\n for (j = i + 1; j < netsize; j++) {\n q = network[j];\n if (q[1] < smallval) { // index on g\n smallpos = j;\n smallval = q[1]; // index on g\n }\n }\n q = network[smallpos];\n // swap p (i) and q (smallpos) entries\n if (i != smallpos) {\n j = q[0]; q[0] = p[0]; p[0] = j;\n j = q[1]; q[1] = p[1]; p[1] = j;\n j = q[2]; q[2] = p[2]; p[2] = j;\n j = q[3]; q[3] = p[3]; p[3] = j;\n }\n // smallval entry is now in position i\n\n if (smallval != previouscol) {\n netindex[previouscol] = (startpos + i) >> 1;\n for (j = previouscol + 1; j < smallval; j++)\n netindex[j] = i;\n previouscol = smallval;\n startpos = i;\n }\n }\n netindex[previouscol] = (startpos + maxnetpos) >> 1;\n for (j = previouscol + 1; j < 256; j++)\n netindex[j] = maxnetpos; // really 256\n }\n\n /*\n Private Method: inxsearch\n\n searches for BGR values 0..255 and returns a color index\n */\n function inxsearch(b, g, r) {\n var a, p, dist;\n\n var bestd = 1000; // biggest possible dist is 256*3\n var best = -1;\n\n var i = netindex[g]; // index on g\n var j = i - 1; // start at netindex[g] and work outwards\n\n while ((i < netsize) || (j >= 0)) {\n if (i < netsize) {\n p = network[i];\n dist = p[1] - g; // inx key\n if (dist >= bestd) i = netsize; // stop iter\n else {\n i++;\n if (dist < 0) dist = -dist;\n a = p[0] - b; if (a < 0) a = -a;\n dist += 
a;\n if (dist < bestd) {\n a = p[2] - r; if (a < 0) a = -a;\n dist += a;\n if (dist < bestd) {\n bestd = dist;\n best = p[3];\n }\n }\n }\n }\n if (j >= 0) {\n p = network[j];\n dist = g - p[1]; // inx key - reverse dif\n if (dist >= bestd) j = -1; // stop iter\n else {\n j--;\n if (dist < 0) dist = -dist;\n a = p[0] - b; if (a < 0) a = -a;\n dist += a;\n if (dist < bestd) {\n a = p[2] - r; if (a < 0) a = -a;\n dist += a;\n if (dist < bestd) {\n bestd = dist;\n best = p[3];\n }\n }\n }\n }\n }\n\n return best;\n }\n\n /*\n Private Method: learn\n\n \"Main Learning Loop\"\n */\n function learn() {\n var i;\n\n var lengthcount = pixels.length;\n var alphadec = 30 + ((samplefac - 1) / 3);\n var samplepixels = lengthcount / (3 * samplefac);\n var delta = ~~(samplepixels / ncycles);\n var alpha = initalpha;\n var radius = initradius;\n\n var rad = radius >> radiusbiasshift;\n\n if (rad <= 1) rad = 0;\n for (i = 0; i < rad; i++)\n radpower[i] = alpha * (((rad * rad - i * i) * radbias) / (rad * rad));\n\n var step;\n if (lengthcount < minpicturebytes) {\n samplefac = 1;\n step = 3;\n } else if ((lengthcount % prime1) !== 0) {\n step = 3 * prime1;\n } else if ((lengthcount % prime2) !== 0) {\n step = 3 * prime2;\n } else if ((lengthcount % prime3) !== 0) {\n step = 3 * prime3;\n } else {\n step = 3 * prime4;\n }\n\n var b, g, r, j;\n var pix = 0; // current pixel\n\n i = 0;\n while (i < samplepixels) {\n b = (pixels[pix] & 0xff) << netbiasshift;\n g = (pixels[pix + 1] & 0xff) << netbiasshift;\n r = (pixels[pix + 2] & 0xff) << netbiasshift;\n\n j = contest(b, g, r);\n\n altersingle(alpha, j, b, g, r);\n if (rad !== 0) alterneigh(rad, j, b, g, r); // alter neighbours\n\n pix += step;\n if (pix >= lengthcount) pix -= lengthcount;\n\n i++;\n\n if (delta === 0) delta = 1;\n if (i % delta === 0) {\n alpha -= alpha / alphadec;\n radius -= radius / radiusdec;\n rad = radius >> radiusbiasshift;\n\n if (rad <= 1) rad = 0;\n for (j = 0; j < rad; j++)\n radpower[j] = alpha * (((rad * rad - j * j) * radbias) / (rad * rad));\n }\n }\n }\n\n /*\n Method: buildColormap\n\n 1. initializes network\n 2. trains it\n 3. removes misconceptions\n 4. 
builds colorindex\n */\n function buildColormap() {\n init();\n learn();\n unbiasnet();\n inxbuild();\n }\n this.buildColormap = buildColormap;\n\n /*\n Method: getColormap\n\n builds colormap from the index\n\n returns array in the format:\n\n >\n > [r, g, b, r, g, b, r, g, b, ..]\n >\n */\n function getColormap() {\n var map = [];\n var index = [];\n\n for (var i = 0; i < netsize; i++)\n index[network[i][3]] = i;\n\n var k = 0;\n for (var l = 0; l < netsize; l++) {\n var j = index[l];\n map[k++] = (network[j][0]);\n map[k++] = (network[j][1]);\n map[k++] = (network[j][2]);\n }\n return map;\n }\n this.getColormap = getColormap;\n\n /*\n Method: lookupRGB\n\n looks for the closest *r*, *g*, *b* color in the map and\n returns its index\n */\n this.lookupRGB = inxsearch;\n}\n\nmodule.exports = NeuQuant;\n","GIFEncoder = require './GIFEncoder.js'\n\nrenderFrame = (frame) ->\n encoder = new GIFEncoder frame.width, frame.height\n\n if frame.index is 0\n encoder.writeHeader()\n else\n encoder.firstFrame = false\n\n encoder.setTransparent frame.transparent\n encoder.setRepeat frame.repeat\n encoder.setDelay frame.delay\n encoder.setQuality frame.quality\n encoder.setDither frame.dither\n encoder.setGlobalPalette frame.globalPalette\n encoder.addFrame frame.data\n encoder.finish() if frame.last\n if frame.globalPalette == true\n frame.globalPalette = encoder.getGlobalPalette()\n\n stream = encoder.stream()\n frame.data = stream.pages\n frame.cursor = stream.cursor\n frame.pageSize = stream.constructor.pageSize\n\n if frame.canTransfer\n transfer = (page.buffer for page in frame.data)\n self.postMessage frame, transfer\n else\n self.postMessage frame\n\nself.onmessage = (event) -> renderFrame event.data\n"]} -------------------------------------------------------------------------------- /src/assets/keifont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/inaniwaudon/genkotsu/ef9f0a97d0c340b25ec101bca6af28f49382fea5/src/assets/keifont.ttf -------------------------------------------------------------------------------- /src/components/GenkotsuDrawer.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useRef, useTransition } from "react"; 2 | import styled from "styled-components"; 3 | import { createGif, drawGenkotsu, drawerHeight, drawerWidth } from "@/libs/draw"; 4 | 5 | const maxWidth = 400 * (16 / 9); 6 | 7 | const Wrapper = styled.section` 8 | width: 100%; 9 | max-width: ${maxWidth}px; 10 | `; 11 | 12 | const Canvas = styled.canvas` 13 | height: 400px; 14 | 15 | @media screen and (max-width: ${maxWidth + 32 * 2}px) { 16 | width: 100%; 17 | height: auto; 18 | } 19 | `; 20 | 21 | const Navigation = styled.div` 22 | display: flex; 23 | 24 | @media screen and (max-width: ${maxWidth + 32 * 2}px) { 25 | flex-direction: column; 26 | gap: 8px; 27 | } 28 | `; 29 | 30 | const Download = styled.a` 31 | width: 100%; 32 | max-width: 300px; 33 | text-align: center; 34 | margin: 16px auto 0 auto; 35 | padding: 8px 0; 36 | border-radius: 4px; 37 | cursor: pointer; 38 | box-shadow: 0 1px 6px rgba(0, 0, 0, 0.1); 39 | display: block; 40 | `; 41 | 42 | interface GenkotsuDrawerProps { 43 | text: string; 44 | } 45 | 46 | const GenkotsuDrawer = ({ text }: GenkotsuDrawerProps) => { 47 | const [_, startTransition] = useTransition(); 48 | 49 | const canvasRef = useRef(null); 50 | 51 | useEffect(() => { 52 | startTransition(() => { 53 | if (canvasRef.current) { 54 | const context = 
canvasRef.current.getContext("2d"); 55 | if (!context) { 56 | return; 57 | } 58 | document.fonts.ready.then(() => 59 | drawGenkotsu(text, drawerWidth, drawerHeight, context) 60 | ) 61 | } 62 | }); 63 | }, [text]); 64 | 65 | const downloadImage = () => { 66 | if (canvasRef.current) { 67 | const link = document.createElement("a"); 68 | link.href = canvasRef.current.toDataURL("image/png"); 69 | link.download = "genkotsu.png"; 70 | link.click(); 71 | } 72 | }; 73 | 74 | const downloadGif = () => { 75 | createGif(text); 76 | }; 77 | 78 | return ( 79 | 80 | 81 | 82 | 画像をダウンロード 83 | GIF 画像をダウンロード 84 | 85 | 86 | ); 87 | }; 88 | 89 | export default GenkotsuDrawer; 90 | -------------------------------------------------------------------------------- /src/libs/draw.ts: -------------------------------------------------------------------------------- 1 | import GIF from "gif.js"; 2 | import { keiFont } from "@/styles/localFonts"; 3 | 4 | const fontSize = 230; 5 | const descender = 0.88; 6 | 7 | export const drawerWidth = 1920 / 2; 8 | export const drawerHeight = 1080 / 2; 9 | 10 | export const drawText = ( 11 | text: string, 12 | fills: boolean, 13 | strokeStyle: string, 14 | context: CanvasRenderingContext2D 15 | ) => { 16 | context.fillStyle = "#000"; 17 | context.strokeStyle = strokeStyle; 18 | context.lineWidth = 6; 19 | // `var(--kei-font)` is not worked somehow 20 | context.font = `${fontSize}px ${keiFont.style.fontFamily}`; 21 | context.textAlign = "center"; 22 | 23 | const drawChar = (x: number, y: number, char: string, deg: number) => { 24 | const transformY = y + fontSize / 2; 25 | const radian = (deg / 360) * Math.PI * 2; 26 | context.translate(x, transformY); 27 | context.rotate(radian); 28 | context.strokeText(char, 0, fontSize * (-0.5 + descender)); 29 | if (fills) { 30 | context.fillText(char, 0, fontSize * (-0.5 + descender)); 31 | } 32 | context.rotate(-radian); 33 | context.translate(-x, -transformY); 34 | }; 35 | 36 | const textWidth = fontSize * 2.2; 37 | const textHeight = fontSize * 2; 38 | 39 | const x = (drawerWidth - textWidth) / 2; 40 | const x0 = x + fontSize * 0.5; 41 | const x1 = x + fontSize * (1.5 + 0.2); 42 | 43 | const y = (drawerHeight - textHeight) / 2; 44 | const y0 = y; 45 | const y1 = y + fontSize; 46 | 47 | const xList = [x0, x1, x0, x1]; 48 | const yList = [y0, y0, y1, y1]; 49 | const degList = [-10, 8, -14, 4]; 50 | for (let i = 0; i < Math.min(text.length, 4); i++) { 51 | drawChar(xList[i], yList[i], text[i], degList[i]); 52 | } 53 | }; 54 | 55 | export const drawGenkotsu = ( 56 | text: string, 57 | width: number, 58 | height: number, 59 | context: CanvasRenderingContext2D 60 | ) => { 61 | // draw background 62 | const color0 = "#ec826a"; 63 | const color1 = "#bf5677"; 64 | const color2 = "#525abf"; 65 | const gradient = context.createRadialGradient( 66 | width / 2, 67 | height / 2, 68 | drawerHeight / 4, 69 | width / 2, 70 | height / 2, 71 | drawerHeight 72 | ); 73 | gradient.addColorStop(0, color0); 74 | gradient.addColorStop(0.3, color1); 75 | gradient.addColorStop(1, color2); 76 | context.fillStyle = gradient; 77 | context.fillRect(0, 0, width, height); 78 | 79 | context.fillStyle = "#ff6"; 80 | context.beginPath(); 81 | 82 | context.shadowColor = "rgba(255,255,255,0.4)"; 83 | context.shadowOffsetX = 0; 84 | context.shadowOffsetY = 0; 85 | context.shadowBlur = 60; 86 | 87 | const diffX = (width - drawerWidth) / 2; 88 | const diffY = (height - drawerHeight) / 2; 89 | 90 | context.translate(diffX, diffY); 91 | for (let i = 0; i < 6; i++) { 92 | 
context.moveTo(drawerWidth / 2, 0); 93 | const angle = (i - 3) * 0.06 + Math.random() * 0.1; 94 | const from = Math.abs(i - 3) * 60 + Math.random() * 20; 95 | const to = drawerHeight - Math.random() * 80; 96 | const count = 20; 97 | 98 | const xList: number[] = []; 99 | const yList: number[] = []; 100 | 101 | for (let i = 0; i < count; i++) { 102 | xList.push(drawerWidth / 2 + angle * 100 * i + Math.random() * 40); 103 | yList.push(((to - from) / count) * i + Math.random() * 30 + from); 104 | const x = xList[i] - 10; 105 | const y = yList[i] + 10; 106 | context.lineTo(x, y); 107 | } 108 | for (let i = count; i >= 0; i--) { 109 | const x = xList[i] + 10; 110 | const y = yList[i]; 111 | context.lineTo(x, y); 112 | } 113 | context.fill(); 114 | } 115 | context.translate(-diffX, -diffY); 116 | 117 | context.shadowColor = "transparent"; 118 | context.shadowBlur = 0; 119 | 120 | // draw a base text 121 | const textCanvas = document.createElement("canvas"); 122 | textCanvas.width = drawerWidth; 123 | textCanvas.height = drawerHeight; 124 | const textContext = textCanvas.getContext("2d"); 125 | if (!textContext) { 126 | return; 127 | } 128 | 129 | // appearance 130 | const loops = 100; 131 | for (let i = 0; i < loops; i++) { 132 | textContext.clearRect(0, 0, drawerWidth, drawerHeight); 133 | const scale = 1.0 - (1.0 / loops) * i; 134 | const scaledWidth = drawerWidth * scale; 135 | const scaledHeight = drawerHeight * scale; 136 | const t = i / loops; 137 | const l0 = 60 + (1 - Math.pow(1 - t, 3)) * 40; 138 | drawText(text, false, `hsl(0, 100%, ${l0}%)`, textContext); 139 | context.drawImage( 140 | textCanvas, 141 | (width - scaledWidth) / 2, 142 | (height - scaledHeight) / 2, 143 | scaledWidth, 144 | scaledHeight 145 | ); 146 | } 147 | context.translate(diffX, diffY); 148 | drawText(text, true, "#fff", context); 149 | context.translate(-diffX, -diffY); 150 | }; 151 | 152 | export const createGif = (text: string) => { 153 | const canvas = document.createElement("canvas"); 154 | const originalCanvas = document.createElement("canvas"); 155 | 156 | const maxWidth = drawerWidth * 2; 157 | const maxHeight = drawerHeight * 2; 158 | canvas.width = drawerWidth; 159 | canvas.height = drawerHeight; 160 | originalCanvas.width = maxWidth; 161 | originalCanvas.height = maxHeight; 162 | const context = canvas.getContext("2d"); 163 | const originalContext = originalCanvas.getContext("2d"); 164 | 165 | if (!context || !originalContext) { 166 | return; 167 | } 168 | drawGenkotsu(text, maxWidth, maxHeight, originalContext); 169 | 170 | const gif = new GIF({ 171 | workers: 2, 172 | quality: 10, 173 | }); 174 | 175 | const options: { scale: number; x?: number; y?: number }[] = [ 176 | { scale: 1.0 }, 177 | { scale: 1.0 }, 178 | { scale: 1.0 }, 179 | { scale: 1.0 }, 180 | { scale: 1.1 }, 181 | { scale: 1.2 }, 182 | { scale: 1.4 }, 183 | { scale: 1.6 }, 184 | { scale: 1.2 }, 185 | { scale: 1.2 }, 186 | { scale: 1.6 }, 187 | { scale: 1.6 }, 188 | { scale: 1.2 }, 189 | { scale: 1.2 }, 190 | { scale: 1.6 }, 191 | { scale: 1.6 }, 192 | { scale: 1.2 }, 193 | { scale: 1.2 }, 194 | { scale: 1.6 }, 195 | { scale: 1.6 }, 196 | { scale: 1.3 }, 197 | { scale: 1.3, x: -0.1, y: -0.1 }, 198 | { scale: 1.3, x: -0.1, y: -0.1 }, 199 | { scale: 1.3, x: 0.15, y: 0.1 }, 200 | { scale: 1.3, x: 0.15, y: 0.1 }, 201 | { scale: 1.3, x: -0.1, y: -0.1 }, 202 | { scale: 1.3, x: -0.1, y: -0.1 }, 203 | { scale: 1.3, x: 0.1, y: 0.15 }, 204 | { scale: 1.3, x: 0.1, y: 0.15 }, 205 | { scale: 1.3, x: -0.1, y: 0.1 }, 206 | { scale: 1.3, x: -0.1, y: 0.1 }, 
207 | { scale: 1.0 }, 208 | { scale: 1.0 }, 209 | { scale: 1.0 }, 210 | { scale: 1.0 }, 211 | ]; 212 | for (let i = 0; i < options.length; i++) { 213 | const width = maxWidth * options[i].scale; 214 | const height = maxHeight * options[i].scale; 215 | context.drawImage( 216 | originalCanvas, 217 | 0, 218 | 0, 219 | maxWidth, 220 | maxHeight, 221 | (drawerWidth - width) / 2 + drawerWidth * (options[i].x ?? 0), 222 | (drawerHeight - height) / 2 + drawerHeight * (options[i].y ?? 0), 223 | maxWidth * options[i].scale, 224 | maxHeight * options[i].scale 225 | ); 226 | gif.addFrame(canvas, { delay: 30, copy: true }); 227 | } 228 | 229 | gif.on("finished", (blob: Blob) => { 230 | window.open(URL.createObjectURL(blob)); 231 | }); 232 | 233 | gif.render(); 234 | }; 235 | -------------------------------------------------------------------------------- /src/pages/_app.tsx: -------------------------------------------------------------------------------- 1 | import { GlobalStyle } from '@/styles/globalStyle' 2 | import type { AppProps } from 'next/app' 3 | import React from 'react' 4 | 5 | export default function App({ Component, pageProps }: AppProps) { 6 | return ( 7 | 8 | 9 | 10 | 11 | ) 12 | } 13 | -------------------------------------------------------------------------------- /src/pages/_document.tsx: -------------------------------------------------------------------------------- 1 | import Document, { 2 | DocumentContext, 3 | Head, 4 | Html, 5 | Main, 6 | NextScript, 7 | } from "next/document"; 8 | import { ServerStyleSheet } from "styled-components"; 9 | 10 | export default class MyDocument extends Document { 11 | static async getInitialProps(ctx: DocumentContext) { 12 | const sheet = new ServerStyleSheet(); 13 | const originalRenderPage = ctx.renderPage; 14 | 15 | try { 16 | ctx.renderPage = () => 17 | originalRenderPage({ 18 | enhanceApp: (App) => (props) => 19 | sheet.collectStyles(), 20 | }); 21 | 22 | const initialProps = await Document.getInitialProps(ctx); 23 | return { 24 | ...initialProps, 25 | styles: ( 26 | <> 27 | {initialProps.styles} 28 | {sheet.getStyleElement()} 29 | 30 | ), 31 | }; 32 | } finally { 33 | sheet.seal(); 34 | } 35 | } 36 | 37 | render() { 38 | return ( 39 | 40 | 41 | 42 | 43 | 44 |
45 | 46 | 47 | 48 | ); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/pages/index.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useState } from "react"; 2 | import Script from "next/script"; 3 | import Head from "next/head"; 4 | import styled from "styled-components"; 5 | import GenkotsuDrawer from "@/components/GenkotsuDrawer"; 6 | 7 | const Main = styled.main` 8 | font-family: sans-serif; 9 | margin: 32px; 10 | `; 11 | 12 | const Input = styled.input` 13 | max-width: 100%; 14 | font-size: 48px; 15 | font-family: var(--kei-font); 16 | margin-bottom: 24px; 17 | border-top: none; 18 | border-right: none; 19 | border-left: none; 20 | border-bottom: solid 1px #ccc; 21 | `; 22 | 23 | const Footer = styled.footer` 24 | margin-top: 16px; 25 | `; 26 | 27 | const Anchor = styled.a` 28 | color: #666; 29 | text-underline-offset: 4px; 30 | `; 31 | 32 | const SnsParagraph = styled.div` 33 | display: flex; 34 | align-items: center; 35 | gap: 16px; 36 | `; 37 | 38 | const Index = () => { 39 | const [text, setText] = useState("げんこつ"); 40 | const [isClient, setIsClient] = useState(false); 41 | useEffect(() => { 42 | setIsClient(true); 43 | }, []); 44 | 45 | return ( 46 | <> 47 | 48 | げんこつ 49 | 50 | 51 | 52 | 53 | 54 | 58 | 59 | 63 | 64 | 68 | 69 |
70 |
71 | setText(e.currentTarget.value)} 75 | /> 76 |
77 | 78 |