├── styles.css ├── README.md ├── avc.js.mem ├── stream.js ├── raw.h264.js ├── Rawh264Demo.html ├── Player.js ├── mp4.js ├── YUVWebGLCanvas.js └── jquery └── jquery.min.js /styles.css: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # RemoteDroid_web 2 | The web client for RemoteDroid 3 | -------------------------------------------------------------------------------- /avc.js.mem: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/omerjerk/RemoteDroid_web/HEAD/avc.js.mem -------------------------------------------------------------------------------- /stream.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var Stream = (function stream() { 4 | function constructor(url) { 5 | this.url = url; 6 | } 7 | 8 | constructor.prototype = { 9 | readAll: function(progress, complete) { 10 | var xhr = new XMLHttpRequest(); 11 | var async = true; 12 | xhr.open("GET", this.url, async); 13 | xhr.responseType = "arraybuffer"; 14 | if (progress) { 15 | xhr.onprogress = function (event) { 16 | progress(xhr.response, event.loaded, event.total); 17 | }; 18 | } 19 | xhr.onreadystatechange = function (event) { 20 | if (xhr.readyState === 4) { 21 | complete(xhr.response); 22 | // var byteArray = new Uint8Array(xhr.response); 23 | // var array = Array.prototype.slice.apply(byteArray); 24 | // complete(array); 25 | } 26 | } 27 | xhr.send(null); 28 | } 29 | }; 30 | return constructor; 31 | })(); 32 | 33 | 34 | -------------------------------------------------------------------------------- /raw.h264.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | var toUint8Array = function(data){ 4 | // var raw = 
window.atob(parStr); 5 | var rawLength = data.length; 6 | // var array = new Uint8Array(new ArrayBuffer(rawLength)); 7 | var array = new Uint8Array(data); 8 | 9 | var i; 10 | for(i = 0; i < rawLength; i++) { 11 | array[i] = data.charCodeAt(i); 12 | } 13 | return array; 14 | }; 15 | 16 | var RawRenderer = function(_useWorker) { 17 | // this.url = url; 18 | 19 | this.player = new Player({ 20 | useWorker : _useWorker, 21 | workerFile : "Decoder.js" 22 | }); 23 | /* 24 | this.player = new Decoder({ 25 | "rgb" : "false" 26 | });*/ 27 | /* 28 | this.onPictureDecoded = function(buffer, width, height) { 29 | console.log("on picture decoded"); 30 | };*/ 31 | this.fileReader = new FileReader(); 32 | var that = this; 33 | this.fileReader.onload = function() { 34 | that.onDecodeMessage(that.fileReader.result); 35 | }; 36 | }; 37 | 38 | RawRenderer.prototype.getCanvas = function() { 39 | return this.player.canvas; 40 | }; 41 | 42 | RawRenderer.prototype.render = function(data) { 43 | this.fileReader.readAsArrayBuffer(data); 44 | }; 45 | 46 | RawRenderer.prototype.onDecodeMessage = function(data) { 47 | var array = new Uint8Array(data); 48 | console.log(array); 49 | this.player.decode(array); 50 | }; 51 | -------------------------------------------------------------------------------- /Rawh264Demo.html: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 |
416 | * var canvas = new WebGLCanvas(document.getElementById('canvas'), new Size(512, 512);
417 | * canvas.texture.fill(data);
418 | * canvas.drawScene();
419 | *
420 | */
var WebGLCanvas = (function () {

  // Vertex stage: transform each quad vertex by the model-view and projection
  // matrices and pass the texture coordinate through to the fragment stage.
  var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", text([
    "attribute vec3 aVertexPosition;",
    "attribute vec2 aTextureCoord;",
    "uniform mat4 uMVMatrix;",
    "uniform mat4 uPMatrix;",
    "varying highp vec2 vTextureCoord;",
    "void main(void) {",
    "  gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);",
    "  vTextureCoord = aTextureCoord;",
    "}"
  ]));

  // Fragment stage: plain textured quad — sample the single RGBA texture.
  var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", text([
    "precision highp float;",
    "varying highp vec2 vTextureCoord;",
    "uniform sampler2D texture;",
    "void main(void) {",
    "  gl_FragColor = texture2D(texture, vTextureCoord);",
    "}"
  ]));

  /**
   * Creates a WebGL-backed canvas that renders a full-viewport textured quad.
   *
   * @param {HTMLCanvasElement} canvas  target canvas element
   * @param {Object} size               size object with `w`/`h` pixel fields
   * @param {boolean} [useFrameBuffer]  when true, render off-screen into a
   *                                    framebuffer (readable via readPixels)
   */
  function constructor(canvas, size, useFrameBuffer) {
    this.canvas = canvas;
    this.size = size;
    this.canvas.width = size.w;
    this.canvas.height = size.h;

    this.onInitWebGL();
    this.onInitShaders();
    initBuffers.call(this);
    if (useFrameBuffer) {
      initFramebuffer.call(this);
    }
    this.onInitTextures();
    initScene.call(this);
  }

  /**
   * Initialize a frame buffer so that we can render off-screen.
   * Attaches an RGBA color texture and a 16-bit depth renderbuffer.
   */
  function initFramebuffer() {
    var gl = this.gl;

    // Create framebuffer object and texture.
    this.framebuffer = gl.createFramebuffer();
    gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
    this.framebufferTexture = new Texture(this.gl, this.size, gl.RGBA);

    // Create and allocate renderbuffer for depth data.
    var renderbuffer = gl.createRenderbuffer();
    gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
    gl.renderbufferStorage(gl.RENDERBUFFER, gl.DEPTH_COMPONENT16, this.size.w, this.size.h);

    // Attach texture and renderbuffer to the framebuffer.
    gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, this.framebufferTexture.texture, 0);
    gl.framebufferRenderbuffer(gl.FRAMEBUFFER, gl.DEPTH_ATTACHMENT, gl.RENDERBUFFER, renderbuffer);
  }

  /**
   * Initialize vertex and texture coordinate buffers for a plane.
   * The quad is drawn as a 4-vertex TRIANGLE_STRIP (see drawScene).
   */
  function initBuffers() {
    var tmp;
    var gl = this.gl;

    // Create vertex position buffer.
    this.quadVPBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVPBuffer);
    tmp = [
       1.0,  1.0, 0.0,
      -1.0,  1.0, 0.0,
       1.0, -1.0, 0.0,
      -1.0, -1.0, 0.0];

    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(tmp), gl.STATIC_DRAW);
    this.quadVPBuffer.itemSize = 3;
    this.quadVPBuffer.numItems = 4;

    /*
     +--------------------+
     | -1,1 (1)           | 1,1 (0)
     |                    |
     |                    |
     |                    |
     |                    |
     |                    |
     | -1,-1 (3)          | 1,-1 (2)
     +--------------------+
    */

    var scaleX = 1.0;
    var scaleY = 1.0;

    // Create vertex texture coordinate buffer.
    this.quadVTCBuffer = gl.createBuffer();
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVTCBuffer);
    tmp = [
      scaleX, 0.0,
      0.0, 0.0,
      scaleX, scaleY,
      0.0, scaleY,
    ];
    gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(tmp), gl.STATIC_DRAW);
  }

  // Reset the model-view matrix to the identity.
  function mvIdentity() {
    this.mvMatrix = Matrix.I(4);
  }

  // Right-multiply the model-view matrix by m.
  function mvMultiply(m) {
    this.mvMatrix = this.mvMatrix.x(m);
  }

  // Translate the model-view matrix by the vector m = [x, y, z].
  function mvTranslate(m) {
    mvMultiply.call(this, Matrix.Translation($V([m[0], m[1], m[2]])).ensure4x4());
  }

  // Upload the current projection and model-view matrices to the shaders.
  function setMatrixUniforms() {
    this.program.setMatrixUniform("uPMatrix", new Float32Array(this.perspectiveMatrix.flatten()));
    this.program.setMatrixUniform("uMVMatrix", new Float32Array(this.mvMatrix.flatten()));
  }

  /**
   * One-time scene setup: camera, quad geometry attributes, textures and
   * matrix uniforms. Leaves the off-screen framebuffer bound when one exists.
   */
  function initScene() {
    var gl = this.gl;

    // Establish the perspective with which we want to view the
    // scene. Our field of view is 45 degrees, with a width/height
    // ratio of 640:480, and we only want to see objects between 0.1 units
    // and 100 units away from the camera.
    this.perspectiveMatrix = makePerspective(45, 1, 0.1, 100.0);

    // Set the drawing position to the "identity" point, which is
    // the center of the scene.
    mvIdentity.call(this);

    // Now move the drawing position a bit to where we want to start
    // drawing the square.
    mvTranslate.call(this, [0.0, 0.0, -2.4]);

    // Draw the cube by binding the array buffer to the cube's vertices
    // array, setting attributes, and pushing it to GL.
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVPBuffer);
    gl.vertexAttribPointer(this.vertexPositionAttribute, 3, gl.FLOAT, false, 0, 0);

    // Set the texture coordinates attribute for the vertices.
    gl.bindBuffer(gl.ARRAY_BUFFER, this.quadVTCBuffer);
    gl.vertexAttribPointer(this.textureCoordAttribute, 2, gl.FLOAT, false, 0, 0);

    this.onInitSceneTextures();

    setMatrixUniforms.call(this);

    if (this.framebuffer) {
      console.log("Bound Frame Buffer");
      gl.bindFramebuffer(gl.FRAMEBUFFER, this.framebuffer);
    }
  }

  constructor.prototype = {
    toString: function() {
      return "WebGLCanvas Size: " + this.size;
    },
    /**
     * Logs (with a stack trace) the last WebGL error, if any.
     * @param {string} [operation] label for the call being checked
     */
    checkLastError: function (operation) {
      var err = this.gl.getError();
      if (err != this.gl.NO_ERROR) {
        var name = this.glNames[err];
        // BUG FIX: the original referenced an undefined `value` here, which
        // threw a ReferenceError whenever the error code was not in glNames.
        name = (name !== undefined) ? name + "(" + err + ")" :
            ("Unknown WebGL ENUM (0x" + err.toString(16) + ")");
        if (operation) {
          console.log("WebGL Error: %s, %s", operation, name);
        } else {
          console.log("WebGL Error: %s", name);
        }
        console.trace();
      }
    },
    /**
     * Acquires the WebGL context and builds the enum-value -> enum-name map
     * used by checkLastError. Tries the standard "webgl" context name first,
     * falling back to the legacy "experimental-webgl" alias.
     */
    onInitWebGL: function () {
      try {
        this.gl = this.canvas.getContext("webgl") ||
                  this.canvas.getContext("experimental-webgl");
      } catch(e) {}

      if (!this.gl) {
        error("Unable to initialize WebGL. Your browser may not support it.");
      }
      if (this.glNames) {
        return;
      }
      // Build a reverse lookup from GL enum value to its symbolic name.
      this.glNames = {};
      for (var propertyName in this.gl) {
        if (typeof this.gl[propertyName] == 'number') {
          this.glNames[this.gl[propertyName]] = propertyName;
        }
      }
    },
    /**
     * Compiles/links the shader program and enables the two vertex
     * attributes. Subclasses override this to install their own shaders.
     */
    onInitShaders: function() {
      this.program = new Program(this.gl);
      this.program.attach(new Shader(this.gl, vertexShaderScript));
      this.program.attach(new Shader(this.gl, fragmentShaderScript));
      this.program.link();
      this.program.use();
      this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
      this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
      this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");
      this.gl.enableVertexAttribArray(this.textureCoordAttribute);
    },
    // Allocate the single RGBA texture sampled by the default shader.
    onInitTextures: function () {
      var gl = this.gl;
      this.texture = new Texture(gl, this.size, gl.RGBA);
    },
    // Bind the texture to texture unit 0 under the "texture" uniform.
    onInitSceneTextures: function () {
      this.texture.bind(0, this.program, "texture");
    },
    // Draw the pre-configured quad (4-vertex triangle strip).
    drawScene: function() {
      this.gl.drawArrays(this.gl.TRIANGLE_STRIP, 0, 4);
    },
    /**
     * Reads the rendered RGBA pixels into `buffer` (a Uint8Array of at least
     * size.w * size.h * 4 bytes). Reads from the bound framebuffer when
     * off-screen rendering is enabled.
     */
    readPixels: function(buffer) {
      var gl = this.gl;
      gl.readPixels(0, 0, this.size.w, this.size.h, gl.RGBA, gl.UNSIGNED_BYTE, buffer);
    }
  };
  return constructor;
})();
647 |
var YUVWebGLCanvas = (function () {
  // Vertex stage: standard MVP transform of the quad; passes the texture
  // coordinate through to the fragment stage.
  var vertexShaderScript = Script.createFromSource("x-shader/x-vertex", text([
    "attribute vec3 aVertexPosition;",
    "attribute vec2 aTextureCoord;",
    "uniform mat4 uMVMatrix;",
    "uniform mat4 uPMatrix;",
    "varying highp vec2 vTextureCoord;",
    "void main(void) {",
    "  gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);",
    "  vTextureCoord = aTextureCoord;",
    "}"
  ]));

  // NOTE(review): unused alternative — scalar-arithmetic YUV->RGB conversion.
  // Kept for reference; the active shader is `fragmentShaderScript` below.
  var fragmentShaderScriptOld = Script.createFromSource("x-shader/x-fragment", text([
    "precision highp float;",
    "varying highp vec2 vTextureCoord;",
    "uniform sampler2D YTexture;",
    "uniform sampler2D UTexture;",
    "uniform sampler2D VTexture;",

    "void main(void) {",
    "  vec3 YUV = vec3",
    "  (",
    "   texture2D(YTexture, vTextureCoord).x * 1.1643828125, // premultiply Y",
    "   texture2D(UTexture, vTextureCoord).x,",
    "   texture2D(VTexture, vTextureCoord).x",
    "  );",
    "  gl_FragColor = vec4",
    "  (",
    "   YUV.x + 1.59602734375 * YUV.z - 0.87078515625,",
    "   YUV.x - 0.39176171875 * YUV.y - 0.81296875 * YUV.z + 0.52959375,",
    "   YUV.x + 2.017234375 * YUV.y - 1.081390625,",
    "   1",
    "  );",
    "}"
  ]));

  // NOTE(review): unused debug variant — renders the raw luma plane only.
  var fragmentShaderScriptSimple = Script.createFromSource("x-shader/x-fragment", text([
    "precision highp float;",
    "varying highp vec2 vTextureCoord;",
    "uniform sampler2D YTexture;",
    "uniform sampler2D UTexture;",
    "uniform sampler2D VTexture;",

    "void main(void) {",
    "  gl_FragColor = texture2D(YTexture, vTextureCoord);",
    "}"
  ]));

  // Active fragment stage: converts the three planar textures from
  // BT.601-style YUV to RGB with a single matrix multiply.
  var fragmentShaderScript = Script.createFromSource("x-shader/x-fragment", text([
    "precision highp float;",
    "varying highp vec2 vTextureCoord;",
    "uniform sampler2D YTexture;",
    "uniform sampler2D UTexture;",
    "uniform sampler2D VTexture;",
    "const mat4 YUV2RGB = mat4",
    "(",
    " 1.1643828125, 0, 1.59602734375, -.87078515625,",
    " 1.1643828125, -.39176171875, -.81296875, .52959375,",
    " 1.1643828125, 2.017234375, 0, -1.081390625,",
    " 0, 0, 0, 1",
    ");",

    "void main(void) {",
    "  gl_FragColor = vec4( texture2D(YTexture, vTextureCoord).x, texture2D(UTexture, vTextureCoord).x, texture2D(VTexture, vTextureCoord).x, 1) * YUV2RGB;",
    "}"
  ]));

  /**
   * WebGLCanvas subclass that renders planar YUV (4:2:0) frames, converting
   * to RGB on the GPU.
   *
   * @param {HTMLCanvasElement} canvas  target canvas element
   * @param {Object} size               luma-plane size (`w`/`h`); the chroma
   *                                    planes use size.getHalfSize()
   */
  function constructor(canvas, size) {
    WebGLCanvas.call(this, canvas, size);
  }

  constructor.prototype = inherit(WebGLCanvas, {
    // Install the YUV->RGB shader pair in place of the base RGBA shaders.
    onInitShaders: function() {
      this.program = new Program(this.gl);
      this.program.attach(new Shader(this.gl, vertexShaderScript));
      this.program.attach(new Shader(this.gl, fragmentShaderScript));
      this.program.link();
      this.program.use();
      this.vertexPositionAttribute = this.program.getAttributeLocation("aVertexPosition");
      this.gl.enableVertexAttribArray(this.vertexPositionAttribute);
      this.textureCoordAttribute = this.program.getAttributeLocation("aTextureCoord");
      this.gl.enableVertexAttribArray(this.textureCoordAttribute);
    },
    // One full-size luma texture plus two half-size chroma textures (4:2:0).
    onInitTextures: function () {
      console.log("creatingTextures: size: " + this.size);
      this.YTexture = new Texture(this.gl, this.size);
      this.UTexture = new Texture(this.gl, this.size.getHalfSize());
      this.VTexture = new Texture(this.gl, this.size.getHalfSize());
    },
    // Bind the three planes to texture units 0/1/2 under their uniforms.
    onInitSceneTextures: function () {
      this.YTexture.bind(0, this.program, "YTexture");
      this.UTexture.bind(1, this.program, "UTexture");
      this.VTexture.bind(2, this.program, "VTexture");
    },
    /**
     * Uploads one frame's planes into the three textures.
     * @param {Uint8Array} y  luma plane, size.w * size.h bytes
     * @param {Uint8Array} u  Cb plane, quarter-size
     * @param {Uint8Array} v  Cr plane, quarter-size
     */
    fillYUVTextures: function(y, u, v) {
      this.YTexture.fill(y);
      this.UTexture.fill(u);
      this.VTexture.fill(v);
    },
    toString: function() {
      return "YUVCanvas Size: " + this.size;
    }
  });

  return constructor;
})();
756 |
757 |
var FilterWebGLCanvas = (function () {
  // Vertex stage: MVP transform of the quad, forwarding the texture coord.
  var vsScript = Script.createFromSource("x-shader/x-vertex", text([
    "attribute vec3 aVertexPosition;",
    "attribute vec2 aTextureCoord;",
    "uniform mat4 uMVMatrix;",
    "uniform mat4 uPMatrix;",
    "varying highp vec2 vTextureCoord;",
    "void main(void) {",
    "  gl_Position = uPMatrix * uMVMatrix * vec4(aVertexPosition, 1.0);",
    "  vTextureCoord = aTextureCoord;",
    "}"
  ]));

  // Fragment stage: pass-through sample of the filter input texture.
  var fsScript = Script.createFromSource("x-shader/x-fragment", text([
    "precision highp float;",
    "varying highp vec2 vTextureCoord;",
    "uniform sampler2D FTexture;",

    "void main(void) {",
    "  gl_FragColor = texture2D(FTexture, vTextureCoord);",
    "}"
  ]));

  /**
   * WebGLCanvas subclass used as a GPU filter stage: upload a buffer, draw,
   * then read the resulting pixels back out.
   *
   * @param {HTMLCanvasElement} canvas  target canvas element
   * @param {Object} size               size object with `w`/`h` pixel fields
   * @param {boolean} [useFrameBuffer]  render off-screen when true
   */
  function constructor(canvas, size, useFrameBuffer) {
    WebGLCanvas.call(this, canvas, size, useFrameBuffer);
  }

  constructor.prototype = inherit(WebGLCanvas, {
    // Install the filter shader pair and enable both vertex attributes.
    onInitShaders: function () {
      var gl = this.gl;
      var program = new Program(gl);
      program.attach(new Shader(gl, vsScript));
      program.attach(new Shader(gl, fsScript));
      program.link();
      program.use();
      this.program = program;
      this.vertexPositionAttribute = program.getAttributeLocation("aVertexPosition");
      gl.enableVertexAttribArray(this.vertexPositionAttribute);
      this.textureCoordAttribute = program.getAttributeLocation("aTextureCoord");
      gl.enableVertexAttribArray(this.textureCoordAttribute);
    },
    // Single RGBA texture holding the filter input.
    onInitTextures: function () {
      console.log("creatingTextures: size: " + this.size);
      this.FTexture = new Texture(this.gl, this.size, this.gl.RGBA);
    },
    // Bind the input texture to unit 0 under the "FTexture" uniform.
    onInitSceneTextures: function () {
      this.FTexture.bind(0, this.program, "FTexture");
    },
    /**
     * Runs one filter pass: upload `buffer`, render the quad, read the
     * result back into `output`.
     */
    process: function (buffer, output) {
      this.FTexture.fill(buffer);
      this.drawScene();
      this.readPixels(output);
    },
    toString: function () {
      return "FilterWebGLCanvas Size: " + this.size;
    }
  });

  return constructor;
})();
817 |
818 |
819 | return YUVWebGLCanvas;
820 |
821 | }));
822 |
--------------------------------------------------------------------------------
/jquery/jquery.min.js:
--------------------------------------------------------------------------------
1 | /*! jQuery v1.8.3 jquery.com | jquery.org/license */
2 | (function(e,t){function _(e){var t=M[e]={};return v.each(e.split(y),function(e,n){t[n]=!0}),t}function H(e,n,r){if(r===t&&e.nodeType===1){var i="data-"+n.replace(P,"-$1").toLowerCase();r=e.getAttribute(i);if(typeof r=="string"){try{r=r==="true"?!0:r==="false"?!1:r==="null"?null:+r+""===r?+r:D.test(r)?v.parseJSON(r):r}catch(s){}v.data(e,n,r)}else r=t}return r}function B(e){var t;for(t in e){if(t==="data"&&v.isEmptyObject(e[t]))continue;if(t!=="toJSON")return!1}return!0}function et(){return!1}function tt(){return!0}function ut(e){return!e||!e.parentNode||e.parentNode.nodeType===11}function at(e,t){do e=e[t];while(e&&e.nodeType!==1);return e}function ft(e,t,n){t=t||0;if(v.isFunction(t))return v.grep(e,function(e,r){var i=!!t.call(e,r,e);return i===n});if(t.nodeType)return v.grep(e,function(e,r){return e===t===n});if(typeof t=="string"){var r=v.grep(e,function(e){return e.nodeType===1});if(it.test(t))return v.filter(t,r,!n);t=v.filter(t,r)}return v.grep(e,function(e,r){return v.inArray(e,t)>=0===n})}function lt(e){var t=ct.split("|"),n=e.createDocumentFragment();if(n.createElement)while(t.length)n.createElement(t.pop());return n}function Lt(e,t){return e.getElementsByTagName(t)[0]||e.appendChild(e.ownerDocument.createElement(t))}function At(e,t){if(t.nodeType!==1||!v.hasData(e))return;var n,r,i,s=v._data(e),o=v._data(t,s),u=s.events;if(u){delete o.handle,o.events={};for(n in u)for(r=0,i=u[n].length;r").appendTo(i.body),n=t.css("display");t.remove();if(n==="none"||n===""){Pt=i.body.appendChild(Pt||v.extend(i.createElement("iframe"),{frameBorder:0,width:0,height:0}));if(!Ht||!Pt.createElement)Ht=(Pt.contentWindow||Pt.contentDocument).document,Ht.write(""),Ht.close();t=Ht.body.appendChild(Ht.createElement(e)),n=Dt(t,"display"),i.body.removeChild(Pt)}return Wt[e]=n,n}function fn(e,t,n,r){var i;if(v.isArray(t))v.each(t,function(t,i){n||sn.test(e)?r(e,i):fn(e+"["+(typeof i=="object"?t:"")+"]",i,n,r)});else if(!n&&v.type(t)==="object")for(i in 
t)fn(e+"["+i+"]",t[i],n,r);else r(e,t)}function Cn(e){return function(t,n){typeof t!="string"&&(n=t,t="*");var r,i,s,o=t.toLowerCase().split(y),u=0,a=o.length;if(v.isFunction(n))for(;u)[^>]*$|#([\w\-]*)$)/,E=/^<(\w+)\s*\/?>(?:<\/\1>|)$/,S=/^[\],:{}\s]*$/,x=/(?:^|:|,)(?:\s*\[)+/g,T=/\\(?:["\\\/bfnrt]|u[\da-fA-F]{4})/g,N=/"[^"\\\r\n]*"|true|false|null|-?(?:\d\d*\.|)\d+(?:[eE][\-+]?\d+|)/g,C=/^-ms-/,k=/-([\da-z])/gi,L=function(e,t){return(t+"").toUpperCase()},A=function(){i.addEventListener?(i.removeEventListener("DOMContentLoaded",A,!1),v.ready()):i.readyState==="complete"&&(i.detachEvent("onreadystatechange",A),v.ready())},O={};v.fn=v.prototype={constructor:v,init:function(e,n,r){var s,o,u,a;if(!e)return this;if(e.nodeType)return this.context=this[0]=e,this.length=1,this;if(typeof e=="string"){e.charAt(0)==="<"&&e.charAt(e.length-1)===">"&&e.length>=3?s=[null,e,null]:s=w.exec(e);if(s&&(s[1]||!n)){if(s[1])return n=n instanceof v?n[0]:n,a=n&&n.nodeType?n.ownerDocument||n:i,e=v.parseHTML(s[1],a,!0),E.test(s[1])&&v.isPlainObject(n)&&this.attr.call(e,n,!0),v.merge(this,e);o=i.getElementById(s[2]);if(o&&o.parentNode){if(o.id!==s[2])return r.find(e);this.length=1,this[0]=o}return this.context=i,this.selector=e,this}return!n||n.jquery?(n||r).find(e):this.constructor(n).find(e)}return v.isFunction(e)?r.ready(e):(e.selector!==t&&(this.selector=e.selector,this.context=e.context),v.makeArray(e,this))},selector:"",jquery:"1.8.3",length:0,size:function(){return this.length},toArray:function(){return l.call(this)},get:function(e){return e==null?this.toArray():e<0?this[this.length+e]:this[e]},pushStack:function(e,t,n){var r=v.merge(this.constructor(),e);return r.prevObject=this,r.context=this.context,t==="find"?r.selector=this.selector+(this.selector?" 
":"")+n:t&&(r.selector=this.selector+"."+t+"("+n+")"),r},each:function(e,t){return v.each(this,e,t)},ready:function(e){return v.ready.promise().done(e),this},eq:function(e){return e=+e,e===-1?this.slice(e):this.slice(e,e+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(l.apply(this,arguments),"slice",l.call(arguments).join(","))},map:function(e){return this.pushStack(v.map(this,function(t,n){return e.call(t,n,t)}))},end:function(){return this.prevObject||this.constructor(null)},push:f,sort:[].sort,splice:[].splice},v.fn.init.prototype=v.fn,v.extend=v.fn.extend=function(){var e,n,r,i,s,o,u=arguments[0]||{},a=1,f=arguments.length,l=!1;typeof u=="boolean"&&(l=u,u=arguments[1]||{},a=2),typeof u!="object"&&!v.isFunction(u)&&(u={}),f===a&&(u=this,--a);for(;a| t |