├── .gitignore
├── README.md
├── app.js
├── app.json
├── app.wxss
├── components
│   ├── app-nav
│   │   ├── index.js
│   │   ├── index.json
│   │   ├── index.wxml
│   │   └── index.wxss
│   └── ec-canvas
│       ├── ec-canvas.js
│       ├── ec-canvas.json
│       ├── ec-canvas.wxml
│       ├── ec-canvas.wxss
│       ├── echarts.js
│       └── wx-canvas.js
├── env.js.example
├── package.json
├── pages
│   ├── coco-ssd
│   │   ├── index.js
│   │   ├── index.json
│   │   ├── index.wxml
│   │   ├── index.wxss
│   │   └── models.js
│   ├── index
│   │   ├── index.js
│   │   ├── index.json
│   │   ├── index.wxml
│   │   └── index.wxss
│   ├── posenet
│   │   ├── index.js
│   │   ├── index.json
│   │   ├── index.wxml
│   │   ├── index.wxss
│   │   ├── models.js
│   │   └── util.js
│   └── recorder
│       ├── index.js
│       ├── index.json
│       ├── index.wxml
│       ├── index.wxss
│       └── utils.js
├── project.config.json
├── sitemap.json
├── static
│   ├── img
│   │   ├── app-avatar.png
│   │   ├── coco-ssd.png
│   │   ├── posenet.png
│   │   ├── powered-by-tensorflow.png
│   │   ├── recorder.png
│   │   └── share-img.png
│   └── svg
│       ├── share-icon.svg
│       └── tensorflow-logo.svg
├── utils
│   └── util.js
└── yarn.lock
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Diagnostic reports (https://nodejs.org/api/report.html)
10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
11 |
12 | # Runtime data
13 | pids
14 | *.pid
15 | *.seed
16 | *.pid.lock
17 |
18 | # Directory for instrumented libs generated by jscoverage/JSCover
19 | lib-cov
20 |
21 | # Coverage directory used by tools like istanbul
22 | coverage
23 | *.lcov
24 |
25 | # nyc test coverage
26 | .nyc_output
27 |
28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
29 | .grunt
30 |
31 | # Bower dependency directory (https://bower.io/)
32 | bower_components
33 |
34 | # node-waf configuration
35 | .lock-wscript
36 |
37 | # Compiled binary addons (https://nodejs.org/api/addons.html)
38 | build/Release
39 |
40 | # Dependency directories
41 | node_modules/
42 | jspm_packages/
43 | miniprogram_npm/
44 |
45 | # TypeScript v1 declaration files
46 | typings/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Microbundle cache
58 | .rpt2_cache/
59 | .rts2_cache_cjs/
60 | .rts2_cache_es/
61 | .rts2_cache_umd/
62 |
63 | # Optional REPL history
64 | .node_repl_history
65 |
66 | # Output of 'npm pack'
67 | *.tgz
68 |
69 | # Yarn Integrity file
70 | .yarn-integrity
71 |
72 | # dotenv environment variables file
73 | .env
74 | .env.test
75 |
76 | # parcel-bundler cache (https://parceljs.org/)
77 | .cache
78 |
79 | # Next.js build output
80 | .next
81 |
82 | # Nuxt.js build / generate output
83 | .nuxt
84 | dist
85 |
86 | # Gatsby files
87 | .cache/
88 | # Comment in the public line in if your project uses Gatsby and *not* Next.js
89 | # https://nextjs.org/blog/next-9-1#public-directory-support
90 | # public
91 |
92 | # vuepress build output
93 | .vuepress/dist
94 |
95 | # Serverless directories
96 | .serverless/
97 |
98 | # FuseBox cache
99 | .fusebox/
100 |
101 | # DynamoDB Local files
102 | .dynamodb/
103 |
104 | # TernJS port file
105 | .tern-port
106 | env.js
107 | .DS_Store
108 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # tensorflow-wxapp
2 |
3 | [TensorFlow.js](https://tensorflow.google.cn/js/?hl=zh_cn) is a `JavaScript` library for training and deploying machine learning models in the browser and on `Node.js`.
4 |
5 | Likewise, the plugin provided by the `TensorFlow` team can now be used inside WeChat mini programs to run a number of pre-trained models out of the box.
6 |
7 | The project currently includes two models: [real-time human pose estimation (PoseNet)](https://github.com/tensorflow/tfjs-models/tree/master/posenet) and [localizing and identifying multiple objects in a single image (Coco SSD)](https://github.com/tensorflow/tfjs-models/tree/master/coco-ssd).
8 |
9 | ## Quick start
10 |
11 | * First, add the `TensorFlow.js` plugin in the mini program admin console; [see this document.](https://mp.weixin.qq.com/wxopen/plugindevdoc?appid=wx6afed118d9e81df9&token=378013697&lang=zh_CN)
12 |
13 | * Install the `npm` packages the project depends on in the project root; either `yarn` or `npm` works.
14 |
15 | ```bash
16 | yarn install
17 | ```
18 |
19 | * Note: after installing the `npm` packages, be sure to run **Build npm** in the WeChat DevTools. See the official WeChat documentation, [Using npm in mini programs](https://developers.weixin.qq.com/miniprogram/dev/devtools/npm.html).
20 |
21 | * Edit the `env.js.example` file in the project root, replacing the model URLs with the URLs of your own models; a sketch follows this file.
22 |
23 | ## Live version
24 |
25 | Search WeChat for: **TensorFlow机器学习模型**. Or scan the QR code:
26 |
27 | 
28 |
--------------------------------------------------------------------------------
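The last Quick start step above points at env.js.example. Since the pages import their model URLs from '../../env' and .gitignore excludes env.js, the example file is evidently meant to be copied to env.js in the project root. A minimal sketch, mirroring the values shipped in env.js.example further below (the your-host.example.com URLs are placeholders for your own model hosting):

```js
// env.js (not committed; copied and adapted from env.js.example)
// PoseNet model file
export const POSENET_URL = 'https://your-host.example.com/models/posenet/model.json';
// Coco SSD model file
export const SSD_NET_URL = 'https://your-host.example.com/models/coco-ssd/model.json';
// env.js.example also lists the accompanying .bin weight shards
// (POSENET_BIN_URL, SSD_NET_BIN_URL1..3); host them next to the model.json files.
```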
/app.js:
--------------------------------------------------------------------------------
1 | //app.js
2 | var fetchWechat = require('fetch-wechat');
3 | var tf = require('@tensorflow/tfjs-core');
4 | var webgl = require('@tensorflow/tfjs-backend-webgl');
5 | var cpu = require('@tensorflow/tfjs-backend-cpu');
6 | var plugin = requirePlugin('tfjsPlugin');
7 | App({
8 | onLaunch: function () {
9 | this.getDeviceInfo();
10 | tf.ENV.flagRegistry.WEBGL_VERSION.evaluationFn = () => { return 1 }; // force the WEBGL_VERSION flag to evaluate to 1
11 | plugin.configPlugin({
12 | // polyfill fetch function
13 | fetchFunc: fetchWechat.fetchFunc(),
14 | // inject tfjs runtime
15 | tf,
16 | // inject webgl backend
17 | webgl,
18 | // inject cpu backend
19 | cpu,
20 | // provide webgl canvas
21 | canvas: wx.createOffscreenCanvas()
22 | });
23 | // tf.tensor([1, 2, 3, 4]).print();
24 | },
25 | getDeviceInfo() {
26 | try {
27 | const res = wx.getSystemInfoSync();
28 | this.globalData.appWidth = typeof res.screenWidth === 'number' ? res.screenWidth : 320;
29 | this.globalData.appHeight = typeof res.screenHeight === 'number' ? res.screenHeight : 500;
30 | this.globalData.benchmarkLevel = typeof res.benchmarkLevel === 'number' ? res.benchmarkLevel : -1;
31 | wx.reportAnalytics('get_device_info', {
32 | device_info: JSON.stringify(res)
33 | });
34 | } catch (e) {
35 | console.log(e);
36 | }
37 | },
38 | globalData: {
39 | appWidth: 320,
40 | appHeight: 500,
41 | benchmarkLevel: -1
42 | }
43 | })
--------------------------------------------------------------------------------
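app.js above wires TensorFlow.js into the plugin: it polyfills fetch, injects the tfjs-core runtime plus the WebGL and CPU backends, and hands the plugin an offscreen canvas. Once that has run, any page can require @tensorflow/tfjs-core directly. A minimal sanity check (a sketch, not part of this repo; tf.ready() and tf.getBackend() are standard tfjs-core calls):

```js
// Hypothetical check, e.g. inside a page's onLoad:
const tf = require('@tensorflow/tfjs-core');

tf.ready().then(() => {
  // With the plugin configured in app.js this should normally report 'webgl',
  // falling back to 'cpu' on devices without WebGL support.
  console.log('tfjs backend:', tf.getBackend());
  tf.tensor([1, 2, 3, 4]).print();
});
```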
/app.json:
--------------------------------------------------------------------------------
1 | {
2 | "pages": [
3 | "pages/index/index",
4 | "pages/posenet/index",
5 | "pages/coco-ssd/index",
6 | "pages/recorder/index"
7 | ],
8 | "window": {
9 | "backgroundTextStyle": "light",
10 | "navigationBarBackgroundColor": "#fff",
11 | "navigationBarTitleText": "TensorFlow",
12 | "navigationBarTextStyle": "black",
13 | "navigationStyle": "custom"
14 | },
15 | "usingComponents": {
16 | "van-nav-bar": "@vant/weapp/nav-bar/index",
17 | "app-nav": "components/app-nav/index",
18 | "van-toast": "@vant/weapp/toast/index"
19 | },
20 | "plugins": {
21 | "tfjsPlugin": {
22 | "version": "0.2.0",
23 | "provider": "wx6afed118d9e81df9"
24 | }
25 | },
26 | "sitemapLocation": "sitemap.json"
27 | }
--------------------------------------------------------------------------------
/app.wxss:
--------------------------------------------------------------------------------
1 | /**app.wxss**/
2 | /* Custom button styles */
3 | .customize-btn {
4 | display: flex;
5 | align-items: center;
6 | justify-content: center;
7 | margin: 0;
8 | padding: 0;
9 | background-color: rgba(0, 0, 0, 0);
10 | line-height: inherit;
11 | height: 50rpx;
12 | border-radius: 0;
13 | border: none;
14 | }
15 |
16 | .customize-btn::after {
17 | display: none;
18 | }
--------------------------------------------------------------------------------
/components/app-nav/index.js:
--------------------------------------------------------------------------------
1 | // components/app-nav/index.js
2 | Component({
3 | options: {
4 | styleIsolation: 'shared'
5 | },
6 | /**
7 | * The component's property list
8 | */
9 | properties: {
10 |
11 | },
12 |
13 | /**
14 | * The component's initial data
15 | */
16 | data: {
17 |
18 | },
19 |
20 | /**
21 | * The component's method list
22 | */
23 | methods: {
24 | handleToHome() {
25 | let length = getCurrentPages().length;
26 | if (length > 1) {
27 | wx.navigateBack();
28 | } else {
29 | wx.redirectTo({ url: `/pages/index/index` });
30 | }
31 | }
32 | }
33 | })
34 |
--------------------------------------------------------------------------------
/components/app-nav/index.json:
--------------------------------------------------------------------------------
1 | {
2 | "component": true,
3 | "usingComponents": {}
4 | }
--------------------------------------------------------------------------------
/components/app-nav/index.wxml:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/components/app-nav/index.wxss:
--------------------------------------------------------------------------------
1 | /* components/app-nav/index.wxss */
2 | .nav .van-nav-bar__text {
3 | color: #ff9100;
4 | font-size: 88rpx;
5 | font-weight: bold;
6 | }
--------------------------------------------------------------------------------
/components/ec-canvas/ec-canvas.js:
--------------------------------------------------------------------------------
1 | import WxCanvas from './wx-canvas';
2 | import * as echarts from './echarts';
3 |
4 | let ctx;
5 |
6 | function compareVersion(v1, v2) {
7 | v1 = v1.split('.')
8 | v2 = v2.split('.')
9 | const len = Math.max(v1.length, v2.length)
10 |
11 | while (v1.length < len) {
12 | v1.push('0')
13 | }
14 | while (v2.length < len) {
15 | v2.push('0')
16 | }
17 |
18 | for (let i = 0; i < len; i++) {
19 | const num1 = parseInt(v1[i])
20 | const num2 = parseInt(v2[i])
21 |
22 | if (num1 > num2) {
23 | return 1
24 | } else if (num1 < num2) {
25 | return -1
26 | }
27 | }
28 | return 0
29 | }
30 |
31 | Component({
32 | properties: {
33 | canvasId: {
34 | type: String,
35 | value: 'ec-canvas'
36 | },
37 |
38 | ec: {
39 | type: Object
40 | },
41 |
42 | forceUseOldCanvas: {
43 | type: Boolean,
44 | value: false
45 | }
46 | },
47 |
48 | data: {
49 | isUseNewCanvas: false
50 | },
51 |
52 | ready: function () {
53 | // Disable progressive because drawImage doesn't support DOM as parameter
54 | // See https://developers.weixin.qq.com/miniprogram/dev/api/canvas/CanvasContext.drawImage.html
55 | echarts.registerPreprocessor(option => {
56 | if (option && option.series) {
57 | if (option.series.length > 0) {
58 | option.series.forEach(series => {
59 | series.progressive = 0;
60 | });
61 | }
62 | else if (typeof option.series === 'object') {
63 | option.series.progressive = 0;
64 | }
65 | }
66 | });
67 |
68 | if (!this.data.ec) {
69 | console.warn('组件需绑定 ec 变量,例:<ec-canvas id="mychart-dom-bar" '
70 | + 'canvas-id="mychart-bar" ec="{{ ec }}"></ec-canvas>');
71 | return;
72 | }
73 |
74 | if (!this.data.ec.lazyLoad) {
75 | this.init();
76 | }
77 | },
78 |
79 | methods: {
80 | init: function (callback) {
81 | const version = wx.getSystemInfoSync().SDKVersion
82 |
83 | const canUseNewCanvas = compareVersion(version, '2.9.0') >= 0;
84 | const forceUseOldCanvas = this.data.forceUseOldCanvas;
85 | const isUseNewCanvas = canUseNewCanvas && !forceUseOldCanvas;
86 | this.setData({ isUseNewCanvas });
87 |
88 | if (forceUseOldCanvas && canUseNewCanvas) {
89 | console.warn('开发者强制使用旧canvas,建议关闭');
90 | }
91 |
92 | if (isUseNewCanvas) {
93 | // console.log('微信基础库版本大于2.9.0,开始使用');
94 | // base library >= 2.9.0: the new canvas is available
95 | this.initByNewWay(callback);
96 | } else {
97 | const isValid = compareVersion(version, '1.9.91') >= 0
98 | if (!isValid) {
99 | console.error('微信基础库版本过低,需大于等于 1.9.91。'
100 | + '参见:https://github.com/ecomfe/echarts-for-weixin'
101 | + '#%E5%BE%AE%E4%BF%A1%E7%89%88%E6%9C%AC%E8%A6%81%E6%B1%82');
102 | return;
103 | } else {
104 | console.warn('建议将微信基础库调整大于等于2.9.0版本。升级后绘图将有更好性能');
105 | this.initByOldWay(callback);
106 | }
107 | }
108 | },
109 |
110 | initByOldWay(callback) {
111 | // 1.9.91 <= version < 2.9.0: initialize the old way
112 | ctx = wx.createCanvasContext(this.data.canvasId, this);
113 | const canvas = new WxCanvas(ctx, this.data.canvasId, false);
114 |
115 | echarts.setCanvasCreator(() => {
116 | return canvas;
117 | });
118 | // const canvasDpr = wx.getSystemInfoSync().pixelRatio // the old WeChat canvas cannot take a dpr
119 | const canvasDpr = 1
120 | var query = wx.createSelectorQuery().in(this);
121 | query.select('.ec-canvas').boundingClientRect(res => {
122 | if (typeof callback === 'function') {
123 | this.chart = callback(canvas, res.width, res.height, canvasDpr);
124 | }
125 | else if (this.data.ec && typeof this.data.ec.onInit === 'function') {
126 | this.chart = this.data.ec.onInit(canvas, res.width, res.height, canvasDpr);
127 | }
128 | else {
129 | this.triggerEvent('init', {
130 | canvas: canvas,
131 | width: res.width,
132 | height: res.height,
133 | canvasDpr: canvasDpr // pass dpr along so the caller can use it in echarts.init
134 | });
135 | }
136 | }).exec();
137 | },
138 |
139 | initByNewWay(callback) {
140 | // version >= 2.9.0: initialize the new way
141 | const query = wx.createSelectorQuery().in(this)
142 | query
143 | .select('.ec-canvas')
144 | .fields({ node: true, size: true })
145 | .exec(res => {
146 | const canvasNode = res[0].node
147 | this.canvasNode = canvasNode
148 |
149 | const canvasDpr = wx.getSystemInfoSync().pixelRatio
150 | const canvasWidth = res[0].width
151 | const canvasHeight = res[0].height
152 |
153 | const ctx = canvasNode.getContext('2d')
154 |
155 | const canvas = new WxCanvas(ctx, this.data.canvasId, true, canvasNode)
156 | echarts.setCanvasCreator(() => {
157 | return canvas
158 | })
159 |
160 | if (typeof callback === 'function') {
161 | this.chart = callback(canvas, canvasWidth, canvasHeight, canvasDpr)
162 | } else if (this.data.ec && typeof this.data.ec.onInit === 'function') {
163 | this.chart = this.data.ec.onInit(canvas, canvasWidth, canvasHeight, canvasDpr)
164 | } else {
165 | this.triggerEvent('init', {
166 | canvas: canvas,
167 | width: canvasWidth,
168 | height: canvasHeight,
169 | dpr: canvasDpr
170 | })
171 | }
172 | })
173 | },
174 | canvasToTempFilePath(opt) {
175 | if (this.data.isUseNewCanvas) {
176 | // new canvas
177 | const query = wx.createSelectorQuery().in(this)
178 | query
179 | .select('.ec-canvas')
180 | .fields({ node: true, size: true })
181 | .exec(res => {
182 | const canvasNode = res[0].node
183 | opt.canvas = canvasNode
184 | wx.canvasToTempFilePath(opt)
185 | })
186 | } else {
187 | // old canvas
188 | if (!opt.canvasId) {
189 | opt.canvasId = this.data.canvasId;
190 | }
191 | ctx.draw(true, () => {
192 | wx.canvasToTempFilePath(opt, this);
193 | });
194 | }
195 | },
196 |
197 | touchStart(e) {
198 | if (this.chart && e.touches.length > 0) {
199 | var touch = e.touches[0];
200 | var handler = this.chart.getZr().handler;
201 | handler.dispatch('mousedown', {
202 | zrX: touch.x,
203 | zrY: touch.y
204 | });
205 | handler.dispatch('mousemove', {
206 | zrX: touch.x,
207 | zrY: touch.y
208 | });
209 | handler.processGesture(wrapTouch(e), 'start');
210 | }
211 | },
212 |
213 | touchMove(e) {
214 | if (this.chart && e.touches.length > 0) {
215 | var touch = e.touches[0];
216 | var handler = this.chart.getZr().handler;
217 | handler.dispatch('mousemove', {
218 | zrX: touch.x,
219 | zrY: touch.y
220 | });
221 | handler.processGesture(wrapTouch(e), 'change');
222 | }
223 | },
224 |
225 | touchEnd(e) {
226 | if (this.chart) {
227 | const touch = e.changedTouches ? e.changedTouches[0] : {};
228 | var handler = this.chart.getZr().handler;
229 | handler.dispatch('mouseup', {
230 | zrX: touch.x,
231 | zrY: touch.y
232 | });
233 | handler.dispatch('click', {
234 | zrX: touch.x,
235 | zrY: touch.y
236 | });
237 | handler.processGesture(wrapTouch(e), 'end');
238 | }
239 | }
240 | }
241 | });
242 |
243 | function wrapTouch(event) {
244 | for (let i = 0; i < event.touches.length; ++i) {
245 | const touch = event.touches[i];
246 | touch.offsetX = touch.x;
247 | touch.offsetY = touch.y;
248 | }
249 | return event;
250 | }
251 |
--------------------------------------------------------------------------------
/components/ec-canvas/ec-canvas.json:
--------------------------------------------------------------------------------
1 | {
2 | "component": true,
3 | "usingComponents": {}
4 | }
--------------------------------------------------------------------------------
/components/ec-canvas/ec-canvas.wxml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
--------------------------------------------------------------------------------
/components/ec-canvas/ec-canvas.wxss:
--------------------------------------------------------------------------------
1 | .ec-canvas {
2 | width: 100%;
3 | height: 100%;
4 | }
5 |
--------------------------------------------------------------------------------
/components/ec-canvas/wx-canvas.js:
--------------------------------------------------------------------------------
1 | export default class WxCanvas {
2 | constructor(ctx, canvasId, isNew, canvasNode) {
3 | this.ctx = ctx;
4 | this.canvasId = canvasId;
5 | this.chart = null;
6 | this.isNew = isNew
7 | if (isNew) {
8 | this.canvasNode = canvasNode;
9 | }
10 | else {
11 | this._initStyle(ctx);
12 | }
13 |
14 | // this._initCanvas(zrender, ctx);
15 |
16 | this._initEvent();
17 | }
18 |
19 | getContext(contextType) {
20 | if (contextType === '2d') {
21 | return this.ctx;
22 | }
23 | }
24 |
25 | // canvasToTempFilePath(opt) {
26 | // if (!opt.canvasId) {
27 | // opt.canvasId = this.canvasId;
28 | // }
29 | // return wx.canvasToTempFilePath(opt, this);
30 | // }
31 |
32 | setChart(chart) {
33 | this.chart = chart;
34 | }
35 |
36 | attachEvent() {
37 | // noop
38 | }
39 |
40 | detachEvent() {
41 | // noop
42 | }
43 |
44 | _initCanvas(zrender, ctx) {
45 | zrender.util.getContext = function () {
46 | return ctx;
47 | };
48 |
49 | zrender.util.$override('measureText', function (text, font) {
50 | ctx.font = font || '12px sans-serif';
51 | return ctx.measureText(text);
52 | });
53 | }
54 |
55 | _initStyle(ctx) {
56 | var styles = ['fillStyle', 'strokeStyle', 'globalAlpha',
57 | 'textAlign', 'textBaseAlign', 'shadow', 'lineWidth',
58 | 'lineCap', 'lineJoin', 'lineDash', 'miterLimit', 'fontSize'];
59 |
60 | styles.forEach(style => {
61 | Object.defineProperty(ctx, style, {
62 | set: value => {
63 | if (style !== 'fillStyle' && style !== 'strokeStyle'
64 | || value !== 'none' && value !== null
65 | ) {
66 | ctx['set' + style.charAt(0).toUpperCase() + style.slice(1)](value);
67 | }
68 | }
69 | });
70 | });
71 |
72 | ctx.createRadialGradient = () => {
73 | return ctx.createCircularGradient(arguments);
74 | };
75 | }
76 |
77 | _initEvent() {
78 | this.event = {};
79 | const eventNames = [{
80 | wxName: 'touchStart',
81 | ecName: 'mousedown'
82 | }, {
83 | wxName: 'touchMove',
84 | ecName: 'mousemove'
85 | }, {
86 | wxName: 'touchEnd',
87 | ecName: 'mouseup'
88 | }, {
89 | wxName: 'touchEnd',
90 | ecName: 'click'
91 | }];
92 |
93 | eventNames.forEach(name => {
94 | this.event[name.wxName] = e => {
95 | const touch = e.touches[0];
96 | this.chart.getZr().handler.dispatch(name.ecName, {
97 | zrX: name.wxName === 'tap' ? touch.clientX : touch.x,
98 | zrY: name.wxName === 'tap' ? touch.clientY : touch.y
99 | });
100 | };
101 | });
102 | }
103 |
104 | set width(w) {
105 | if (this.canvasNode) this.canvasNode.width = w
106 | }
107 | set height(h) {
108 | if (this.canvasNode) this.canvasNode.height = h
109 | }
110 |
111 | get width() {
112 | if (this.canvasNode)
113 | return this.canvasNode.width
114 | return 0
115 | }
116 | get height() {
117 | if (this.canvasNode)
118 | return this.canvasNode.height
119 | return 0
120 | }
121 | }
122 |
--------------------------------------------------------------------------------
/env.js.example:
--------------------------------------------------------------------------------
1 | // PoseNet model URLs
2 | export const POSENET_URL = 'https://ai.flypot.cn/models/posenet/model.json';
3 | export const POSENET_BIN_URL = 'https://ai.flypot.cn/models/posenet/group1-shard1of1.bin';
4 | // Coco SSD model URLs
5 | export const SSD_NET_URL = 'https://ai.flypot.cn/models/coco-ssd/model.json';
6 | export const SSD_NET_BIN_URL1 = 'https://ai.flypot.cn/models/mobilenet/group1-shard1of3.bin';
7 | export const SSD_NET_BIN_URL2 = 'https://ai.flypot.cn/models/mobilenet/group1-shard2of3.bin';
8 | export const SSD_NET_BIN_URL3 = 'https://ai.flypot.cn/models/mobilenet/group1-shard3of3.bin';
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tensorflow-wxapp",
3 | "version": "1.0.0",
4 | "main": "index.js",
5 | "repository": "https://github.com/GeekYmm/tensorflow-wxapp.git",
6 | "author": "yuanmengmeng ",
7 | "license": "MIT",
8 | "dependencies": {
9 | "@tensorflow-models/coco-ssd": "^2.1.0",
10 | "@tensorflow-models/posenet": "^2.2.1",
11 | "@tensorflow/tfjs-backend-cpu": "^2.7.0",
12 | "@tensorflow/tfjs-backend-webgl": "^2.7.0",
13 | "@tensorflow/tfjs-converter": "^2.7.0",
14 | "@tensorflow/tfjs-core": "^2.7.0",
15 | "@vant/weapp": "^1.6.1",
16 | "fetch-wechat": "^0.0.3"
17 | }
18 | }
19 |
--------------------------------------------------------------------------------
/pages/coco-ssd/index.js:
--------------------------------------------------------------------------------
1 | // pages/coco-ssd/index.js
2 | import Toast from '../../miniprogram_npm/@vant/weapp/toast/toast';
3 | import { Classifier } from './models';
4 | const { appWidth, appHeight, benchmarkLevel } = getApp().globalData;
5 |
6 | Page({
7 | classifier: null,
8 | ctx: null,
9 | /**
10 | * Initial data for the page
11 | */
12 | data: {
13 | predicting: false
14 | },
15 |
16 | /**
17 | * Lifecycle function: called when the page loads
18 | */
19 | onLoad: function (options) {
20 | console.log('benchmarkLevel', benchmarkLevel);
21 | },
22 |
23 | /**
24 | * Lifecycle function: called when the page's initial render has finished
25 | */
26 | onReady: function () {
27 | this.ctx = wx.createCanvasContext('ssd');
28 | const context = wx.createCameraContext(this);
29 | this.initClassifier();
30 | let count = 0;
31 | const listener = context.onCameraFrame(frame => {
32 | count++;
33 | if (count === 2) { // throttle the detection frame rate
34 | if (this.classifier && this.classifier.isReady()) {
35 | this.executeClassify(frame);
36 | }
37 | count = 0;
38 | }
39 | });
40 | listener.start();
41 | },
42 |
43 | /**
44 | * Initialize the SSD model
45 | */
46 | initClassifier() {
47 | wx.showLoading({ title: '模型正在加载...' });
48 | this.classifier = new Classifier({ width: appWidth, height: appHeight });
49 | this.classifier.load().then(() => {
50 | wx.hideLoading();
51 | }).catch(err => {
52 | console.log('模型加载报错:', err);
53 | Toast.loading({
54 | message: '网络连接异常',
55 | forbidClick: true,
56 | loadingType: 'spinner',
57 | });
58 | })
59 | },
60 |
61 | /**
62 | * Run detection on a camera frame
63 | */
64 | executeClassify: function (frame) {
65 | if (this.classifier && this.classifier.isReady() && !this.data.predicting) {
66 | this.setData({
67 | predicting: true
68 | }, () => {
69 | this.classifier.detect(frame).then(res => {
70 | // console.log(res)
71 | this.classifier.drawBoxes(this.ctx, res);
72 | this.data.predicting = false;
73 | }).catch((err) => {
74 | console.log(err)
75 | })
76 | })
77 | }
78 | },
79 |
80 | /**
81 | * Lifecycle function: called when the page is shown
82 | */
83 | onShow: function () {
84 |
85 | },
86 |
87 | /**
88 | * Lifecycle function: called when the page is hidden
89 | */
90 | onHide: function () {
91 |
92 | },
93 |
94 | /**
95 | * Lifecycle function: called when the page is unloaded
96 | */
97 | onUnload: function () {
98 | if (this.classifier && this.classifier.isReady()) {
99 | this.classifier.dispose();
100 | }
101 | },
102 |
103 | /**
104 | * Page event handler: called when the user pulls down to refresh
105 | */
106 | onPullDownRefresh: function () {
107 |
108 | },
109 |
110 | /**
111 | * Page event handler: called when the page is scrolled to the bottom
112 | */
113 | onReachBottom: function () {
114 |
115 | },
116 |
117 | /**
118 | * Called when the user taps Share in the top-right menu
119 | */
120 | onShareAppMessage: function () {
121 | return {
122 | title: '微信小程序 × Coco SSD',
123 | path: '/pages/coco-ssd/index',
124 | imageUrl: '/static/img/share-img.png'
125 | }
126 | },
127 | onAddToFavorites() {
128 | return {
129 | title: '微信小程序 × Coco SSD',
130 | imageUrl: '/static/img/app-avatar.png'
131 | }
132 | }
133 | })
--------------------------------------------------------------------------------
/pages/coco-ssd/index.json:
--------------------------------------------------------------------------------
1 | {
2 | "disableScroll": true,
3 | "usingComponents": {}
4 | }
--------------------------------------------------------------------------------
/pages/coco-ssd/index.wxml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/pages/coco-ssd/index.wxss:
--------------------------------------------------------------------------------
1 | /* pages/coco-ssd/index.wxss */
2 | .camera {
3 | width: 100%;
4 | height: 100vh;
5 | }
6 |
7 | .canvas {
8 | width: 100%;
9 | height: 100vh;
10 | }
--------------------------------------------------------------------------------
/pages/coco-ssd/models.js:
--------------------------------------------------------------------------------
1 | import * as tf from '@tensorflow/tfjs-core'
2 |
3 | import * as cocoSsd from '@tensorflow-models/coco-ssd'
4 |
5 | import { getFrameSliceOptions } from '../../utils/util'
6 |
7 | import { SSD_NET_URL } from '../../env'
8 |
9 | const fontSize = 20
10 | const color = 'aqua'
11 | const lineWidth = 2
12 |
13 | export class Classifier {
14 | // display size struct { width: Number, height: Number }
15 | displaySize
16 |
17 | // the neural network model
18 | ssdNet
19 |
20 | // ready
21 | ready
22 |
23 | constructor(displaySize) {
24 | this.displaySize = {
25 | width: displaySize.width,
26 | height: displaySize.height
27 | }
28 |
29 | this.ready = false
30 | }
31 |
32 | load() {
33 | return new Promise((resolve, reject) => {
34 | cocoSsd.load({
35 | modelUrl: SSD_NET_URL
36 | }).then(model => {
37 | this.ssdNet = model
38 | this.ready = true
39 | resolve()
40 | }).catch(err => {
41 | reject(err)
42 | })
43 | })
44 | }
45 |
46 | isReady() {
47 | return this.ready
48 | }
49 |
50 | detect(frame) {
51 | return new Promise((resolve, reject) => {
52 | const tensor = tf.tidy(() => {
53 | // const temp = tf.tensor(new Uint8Array(frame.data), [frame.height, frame.width, 4])
54 | const imgData = {
55 | data: new Uint8Array(frame.data),
56 | width: frame.width,
57 | height: frame.height
58 | }
59 | const temp = tf.browser.fromPixels(imgData, 4)
60 | const sliceOptions = getFrameSliceOptions(frame.width, frame.height, this.displaySize.width, this.displaySize.height)
61 |
62 | return temp.slice(sliceOptions.start, sliceOptions.size).resizeBilinear([this.displaySize.height, this.displaySize.width]).asType('int32')
63 | })
64 |
65 | this.ssdNet.detect(tensor).then(res => {
66 | tensor.dispose()
67 | resolve(res)
68 | }).catch(err => {
69 | console.log(err)
70 | tensor.dispose()
71 | reject()
72 | })
73 | })
74 | }
75 |
76 | drawBoxes(ctx, boxes) {
77 | if (!ctx && !boxes) {
78 | return
79 | }
80 |
81 | const minScore = 0.3
82 |
83 | ctx.setFontSize(fontSize)
84 | ctx.strokeStyle = color
85 | ctx.lineWidth = lineWidth
86 |
87 | boxes.forEach(box => {
88 | if (box.score >= minScore) {
89 | ctx.rect(...(box.bbox))
90 | ctx.stroke()
91 |
92 | ctx.setFillStyle(color)
93 | ctx.fillText(box['class'], box.bbox[0], box.bbox[1] - 5)
94 | }
95 | })
96 |
97 | ctx.draw()
98 | return true
99 | }
100 |
101 | dispose() {
102 | this.ssdNet.dispose()
103 | }
104 | }
--------------------------------------------------------------------------------
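For reference, each entry that Classifier.detect resolves with follows the standard @tensorflow-models/coco-ssd result shape that drawBoxes consumes; roughly (a sketch, values are illustrative):

```js
// Approximate shape of one coco-ssd detection (illustrative values only).
const detection = {
  bbox: [40, 120, 230, 310], // [x, y, width, height] in display pixels
  class: 'person',           // one of the COCO class labels
  score: 0.92                // confidence in the range 0-1
};
// drawBoxes skips entries with score < 0.3, strokes ctx.rect(...bbox)
// and writes the class label just above the box before calling ctx.draw().
```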
/pages/index/index.js:
--------------------------------------------------------------------------------
1 | // pages/index/index.js
2 | Page({
3 |
4 | /**
5 | * Initial data for the page
6 | */
7 | data: {
8 | modelsList: [{
9 | url: '/pages/posenet/index',
10 | logo: '/static/img/posenet.png',
11 | title: '姿势估计',
12 | desc: '实时估计人体姿势 (PoseNet)。'
13 | }, {
14 | url: '/pages/coco-ssd/index',
15 | logo: '/static/img/coco-ssd.png',
16 | title: '对象检测',
17 | desc: '定位和识别单个图像中的多个对象 (Coco SSD)。'
18 | }, {
19 | url: '/pages/recorder/index',
20 | logo: '/static/img/recorder.png',
21 | title: '实时解析声音',
22 | desc: '利用小程序录音API,实时解析PCM格式的声音,并转换成音量大小的百分比。这个转换结果并不是具体分贝。'
23 | }]
24 | },
25 |
26 | handleClickItem(e) {
27 | let { url } = e.currentTarget.dataset;
28 | wx.navigateTo({ url });
29 | },
30 |
31 | /**
32 | * Lifecycle function: called when the page loads
33 | */
34 | onLoad: function (options) {
35 |
36 | },
37 |
38 | /**
39 | * Lifecycle function: called when the page's initial render has finished
40 | */
41 | onReady: function () {
42 |
43 | },
44 |
45 | /**
46 | * Lifecycle function: called when the page is shown
47 | */
48 | onShow: function () {
49 |
50 | },
51 |
52 | /**
53 | * Lifecycle function: called when the page is hidden
54 | */
55 | onHide: function () {
56 |
57 | },
58 |
59 | /**
60 | * Lifecycle function: called when the page is unloaded
61 | */
62 | onUnload: function () {
63 |
64 | },
65 |
66 | /**
67 | * Page event handler: called when the user pulls down to refresh
68 | */
69 | onPullDownRefresh: function () {
70 |
71 | },
72 |
73 | /**
74 | * Page event handler: called when the page is scrolled to the bottom
75 | */
76 | onReachBottom: function () {
77 |
78 | },
79 |
80 | /**
81 | * Called when the user taps Share in the top-right menu
82 | */
83 | onShareAppMessage: function () {
84 | return {
85 | title: 'TensorFlow遇上小程序',
86 | path: '/pages/index/index',
87 | imageUrl: '/static/img/share-img.png'
88 | }
89 | },
90 | onAddToFavorites() {
91 | return {
92 | title: 'TensorFlow遇上小程序',
93 | imageUrl: '/static/img/app-avatar.png'
94 | }
95 | },
96 | onShareTimeline() {
97 | return {
98 | title: 'TensorFlow遇上小程序',
99 | imageUrl: '/static/img/app-avatar.png'
100 | }
101 | }
102 | })
--------------------------------------------------------------------------------
/pages/index/index.json:
--------------------------------------------------------------------------------
1 | {
2 | "usingComponents": {}
3 | }
--------------------------------------------------------------------------------
/pages/index/index.wxml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
7 |
8 |
9 |
10 |
11 | 关于
12 |
13 | TensorFlow.js 是一个 JavaScript 库,用于在浏览器和 Node.js 上训练和部署机器学习模型。同样的,现在也可以在小程序里使用 TensorFlow 团队提供的插件,运行这些开箱即用的预训练模型。
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | {{item.title}}
22 | {{item.desc}}
23 |
24 |
25 |
31 |
--------------------------------------------------------------------------------
/pages/index/index.wxss:
--------------------------------------------------------------------------------
1 | /* pages/index/index.wxss */
2 | .share {
3 | font-size: 24rpx;
4 | font-weight: bold;
5 | color: #ff9100;
6 | }
7 |
8 | .share-icon {
9 | padding: 8rpx 8rpx 8rpx 0;
10 | width: 40rpx;
11 | height: 40rpx;
12 | }
13 |
14 | .tensorflow-logo {
15 | width: 400rpx;
16 | height: 70rpx;
17 | padding-top: 10rpx;
18 | }
19 |
20 | .container {
21 | display: flex;
22 | flex-direction: column;
23 | align-items: center;
24 | justify-content: center;
25 | }
26 |
27 | .about {
28 | margin: 40rpx auto;
29 | width: 680rpx;
30 | }
31 |
32 | .title {
33 | font-size: 40rpx;
34 | font-weight: 700;
35 | line-height: 2;
36 | color: #425066;
37 | padding-bottom: 24rpx;
38 | }
39 |
40 | .subtitle {
41 | color: #5f6368;
42 | font-size: 28rpx;
43 | line-height: 1.6;
44 | }
45 |
46 | .models {
47 | display: flex;
48 | flex-direction: column;
49 | overflow: hidden;
50 | width: 610rpx;
51 | margin: 30rpx;
52 | border-radius: 20rpx;
53 | box-shadow: 0 0 36rpx rgba(0, 0, 0, 0.1);
54 | transform: translate3d(0, 0, 0);
55 | }
56 |
57 | .item-top {
58 | width: 100%;
59 | height: 300rpx;
60 | /* border-bottom: solid 1px #dadada; */
61 | border-radius: 20rpx 20rpx 0 0;
62 | display: flex;
63 | align-items: center;
64 | justify-content: center;
65 | }
66 |
67 | .models-icon {
68 | width: 180rpx;
69 | height: 180rpx;
70 | }
71 |
72 | .item-bottom {
73 | display: flex;
74 | flex-direction: column;
75 | padding: 52rpx 60rpx;
76 | background: linear-gradient(to right, #ff6f00, #ff9100);
77 | }
78 |
79 | .item-title {
80 | font-size: 40rpx;
81 | font-weight: 500;
82 | color: #ffffff;
83 | margin-bottom: 24rpx;
84 | }
85 |
86 | .item-desc {
87 | color: #ffffff;
88 | font-size: 28rpx;
89 | line-height: 44rpx;
90 | margin-bottom: 16rpx;
91 | }
92 |
93 | .footer {
94 | display: flex;
95 | align-items: center;
96 | justify-content: center;
97 | padding: 20rpx 0 60rpx;
98 | }
99 |
100 | .vertical-line {
101 | width: 1px;
102 | height: 30rpx;
103 | background: #5f6368;
104 | margin: 0 10rpx;
105 | }
106 |
107 | .powered-by-tensorflow {
108 | width: 140rpx;
109 | height: 62rpx;
110 | }
--------------------------------------------------------------------------------
/pages/posenet/index.js:
--------------------------------------------------------------------------------
1 | // pages/posenet/index.js
2 | import Toast from '../../miniprogram_npm/@vant/weapp/toast/toast';
3 | import { Classifier } from './models';
4 | const { appWidth, appHeight, benchmarkLevel } = getApp().globalData;
5 |
6 | Page({
7 | classifier: null,
8 | ctx: null,
9 | /**
10 | * Initial data for the page
11 | */
12 | data: {
13 | devicePosition: 'front',
14 | predicting: false
15 | },
16 | handleSwitchCamera() {
17 | let devicePosition = this.data.devicePosition === 'front' ? 'back' : 'front';
18 | this.setData({ devicePosition });
19 | },
20 | /**
21 | * Lifecycle function: called when the page loads
22 | */
23 | onLoad: function (options) {
24 | console.log('benchmarkLevel', benchmarkLevel);
25 | },
26 | onCameraError(err) {
27 | console.log('onCameraError>>', err);
28 | },
29 | /**
30 | * Lifecycle function: called when the page's initial render has finished
31 | */
32 | onReady() {
33 | const context = wx.createCameraContext(this);
34 | this.ctx = wx.createCanvasContext('pose', this);
35 | this.initClassifier();
36 | let count = 0;
37 | const listener = context.onCameraFrame(frame => {
38 | count++;
39 | if (count === 2) { // throttle the detection frame rate
40 | if (this.classifier && this.classifier.isReady()) {
41 | this.executeClassify(frame);
42 | }
43 | count = 0;
44 | }
45 | });
46 | listener.start();
47 | },
48 |
49 | initClassifier() {
50 | wx.showLoading({ title: '模型正在加载...' });
51 | this.classifier = new Classifier({ width: appWidth, height: appHeight });
52 | this.classifier.load().then(() => {
53 | wx.hideLoading();
54 | }).catch(err => {
55 | console.log('模型加载报错:', err);
56 | Toast.loading({
57 | message: '网络连接异常',
58 | forbidClick: true,
59 | loadingType: 'spinner',
60 | });
61 | })
62 | },
63 |
64 | executeClassify(frame) {
65 | if (this.classifier && this.classifier.isReady() && !this.data.predicting) {
66 | this.setData({
67 | predicting: true
68 | }, () => {
69 | this.classifier.detectSinglePose(frame).then((pose) => {
70 | const nosePosition = pose.keypoints[0].position;
71 | this.classifier.drawSinglePose(this.ctx, pose);
72 | this.setData({
73 | predicting: false,
74 | nosePosition: Math.round(nosePosition.x) + ', ' + Math.round(nosePosition.y)
75 | })
76 | }).catch((err) => {
77 | console.log(err, err.stack);
78 | });
79 | });
80 | }
81 | },
82 |
83 | /**
84 | * Lifecycle function: called when the page is shown
85 | */
86 | onShow: function () {
87 |
88 | },
89 |
90 | /**
91 | * Lifecycle function: called when the page is hidden
92 | */
93 | onHide: function () {
94 |
95 | },
96 |
97 | /**
98 | * Lifecycle function: called when the page is unloaded
99 | */
100 | onUnload: function () {
101 | if (this.classifier && this.classifier.isReady()) {
102 | this.classifier.dispose();
103 | }
104 | },
105 |
106 | /**
107 | * Page event handler: called when the user pulls down to refresh
108 | */
109 | onPullDownRefresh: function () {
110 |
111 | },
112 |
113 | /**
114 | * Page event handler: called when the page is scrolled to the bottom
115 | */
116 | onReachBottom: function () {
117 |
118 | },
119 |
120 | /**
121 | * Called when the user taps Share in the top-right menu
122 | */
123 | onShareAppMessage: function () {
124 | return {
125 | title: '微信小程序 × PoseNet',
126 | path: '/pages/posenet/index',
127 | imageUrl: '/static/img/share-img.png'
128 | }
129 | },
130 | onAddToFavorites() {
131 | return {
132 | title: '微信小程序 × PoseNet',
133 | imageUrl: '/static/img/app-avatar.png'
134 | }
135 | }
136 | })
--------------------------------------------------------------------------------
/pages/posenet/index.json:
--------------------------------------------------------------------------------
1 | {
2 | "disableScroll": true,
3 | "usingComponents": {}
4 | }
--------------------------------------------------------------------------------
/pages/posenet/index.wxml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
7 |
8 |
--------------------------------------------------------------------------------
/pages/posenet/index.wxss:
--------------------------------------------------------------------------------
1 | /* pages/posenet/index.wxss */
2 | .camera {
3 | width: 100%;
4 | height: 100vh;
5 | }
6 |
7 | .pose-canvas {
8 | width: 100%;
9 | height: 100vh;
10 | }
11 |
12 | .cover-btn {
13 | position: fixed;
14 | z-index: 100;
15 | bottom: 40rpx;
16 | left: 40rpx;
17 | width: 670rpx;
18 | height: 100rpx;
19 | background: #ff6f00;
20 | border-radius: 12rpx;
21 | font-size: 32rpx;
22 | font-weight: 500;
23 | color: #FFFFFF;
24 | line-height: 100rpx;
25 | text-align: center;
26 | }
--------------------------------------------------------------------------------
/pages/posenet/models.js:
--------------------------------------------------------------------------------
1 | import * as tf from '@tensorflow/tfjs-core'
2 |
3 | import * as posenet from '@tensorflow-models/posenet'
4 |
5 | import { getFrameSliceOptions } from '../../utils/util'
6 |
7 | import { drawKeypoints, drawSkeleton } from './util'
8 |
9 | import { POSENET_URL } from '../../env'
10 |
11 | export class Classifier {
12 |
13 | // display size struct { width: Number, height: Number }
14 | displaySize
15 |
16 | // the neural network model
17 | poseNet
18 |
19 | // ready
20 | ready
21 |
22 | constructor(displaySize) {
23 | this.displaySize = {
24 | width: displaySize.width,
25 | height: displaySize.height
26 | }
27 | this.ready = false
28 | }
29 |
30 | load() {
31 | return new Promise((resolve, reject) => {
32 | posenet
33 | .load({
34 | architecture: 'MobileNetV1',
35 | outputStride: 16,
36 | inputResolution: 193,
37 | multiplier: 0.5,
38 | modelUrl: POSENET_URL
39 | })
40 | .then(model => {
41 | this.poseNet = model
42 | this.ready = true
43 | resolve()
44 | })
45 | .catch(err => {
46 | reject(err)
47 | })
48 | })
49 | }
50 |
51 | isReady() {
52 | return this.ready
53 | }
54 |
55 | detectSinglePose(frame) {
56 | return new Promise((resolve, reject) => {
57 | const video = tf.tidy(() => {
58 | // const temp = tf.tensor(new Uint8Array(frame.data), [frame.height, frame.width, 4])
59 | const imgData = {
60 | data: new Uint8Array(frame.data),
61 | width: frame.width,
62 | height: frame.height
63 | }
64 | const temp = tf.browser.fromPixels(imgData, 4)
65 | const sliceOptions = getFrameSliceOptions(frame.width, frame.height, this.displaySize.width, this.displaySize.height)
66 | return temp.slice(sliceOptions.start, sliceOptions.size).resizeBilinear([this.displaySize.height, this.displaySize.width])
67 | })
68 | // since images are being fed from a webcam
69 | const flipHorizontal = false
70 | this.poseNet.estimateSinglePose(video, { flipHorizontal }).then(pose => {
71 | video.dispose()
72 | resolve(pose)
73 | }).catch(err => {
74 | reject(err)
75 | })
76 | })
77 | }
78 |
79 | drawSinglePose(ctx, pose) {
80 | if (!ctx && !pose) {
81 | return
82 | }
83 |
84 | const minPoseConfidence = 0.3
85 | const minPartConfidence = 0.3
86 |
87 | if (pose.score >= minPoseConfidence) {
88 | drawKeypoints(pose.keypoints, minPartConfidence, ctx)
89 | drawSkeleton(pose.keypoints, minPartConfidence, ctx)
90 | }
91 |
92 | ctx.draw()
93 | return pose
94 | }
95 |
96 | dispose() {
97 | this.poseNet.dispose()
98 | }
99 | }
--------------------------------------------------------------------------------
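For reference, the pose object that detectSinglePose resolves with follows the standard @tensorflow-models/posenet single-pose shape used by index.js and util.js; roughly (a sketch, values are illustrative):

```js
// Approximate shape of a PoseNet single-pose result (illustrative values only).
const pose = {
  score: 0.87,               // overall pose confidence, 0-1
  keypoints: [
    { part: 'nose',    score: 0.99, position: { x: 187.4, y: 250.1 } },
    { part: 'leftEye', score: 0.98, position: { x: 178.2, y: 241.6 } }
    // ...17 keypoints in total
  ]
};
// pages/posenet/index.js reads keypoints[0].position as the nose position,
// and drawSinglePose only draws when pose.score >= 0.3.
```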
/pages/posenet/util.js:
--------------------------------------------------------------------------------
1 | import * as posenet from '@tensorflow-models/posenet'
2 |
3 | const color = 'aqua'
4 | const boundingBoxColor = 'red'
5 | const lineWidth = 2
6 |
7 | function toTuple({ y, x }) {
8 | return [y, x]
9 | }
10 |
11 | export function drawPoint(ctx, y, x, r, color) {
12 | ctx.beginPath()
13 | ctx.arc(x, y, r, 0, 2 * Math.PI)
14 | ctx.fillStyle = color
15 | ctx.fill()
16 | }
17 |
18 | /**
19 | * Draws a line on a canvas, i.e. a joint
20 | */
21 | export function drawSegment([ay, ax], [by, bx], color, scale, ctx) {
22 | ctx.beginPath()
23 | ctx.moveTo(ax * scale, ay * scale)
24 | ctx.lineTo(bx * scale, by * scale)
25 | ctx.lineWidth = lineWidth
26 | ctx.strokeStyle = color
27 | ctx.stroke()
28 | }
29 |
30 | /**
31 | * Draws a pose skeleton by looking up all adjacent keypoints/joints
32 | */
33 | // tslint:disable-next-line:no-any
34 | export function drawSkeleton(keypoints, minConfidence, ctx, scale = 1) {
35 | const adjacentKeyPoints =
36 | posenet.getAdjacentKeyPoints(keypoints, minConfidence)
37 |
38 | // tslint:disable-next-line:no-any
39 | adjacentKeyPoints.forEach((keypoints) => {
40 | drawSegment(
41 | toTuple(keypoints[0].position), toTuple(keypoints[1].position), color,
42 | scale, ctx)
43 | })
44 | }
45 |
46 | /**
47 | * Draw pose keypoints onto a canvas
48 | */
49 | // tslint:disable-next-line:no-any
50 | export function drawKeypoints(keypoints, minConfidence, ctx, scale = 1) {
51 | for (let i = 0; i < keypoints.length; i++) {
52 | const keypoint = keypoints[i]
53 |
54 | if (keypoint.score < minConfidence) {
55 | continue
56 | }
57 |
58 | const { y, x } = keypoint.position
59 | drawPoint(ctx, y * scale, x * scale, 5, color)
60 | }
61 | }
62 |
63 | /**
64 | * Draw the bounding box of a pose. For example, for a whole person standing
65 | * in an image, the bounding box will begin at the nose and extend to one of
66 | * ankles
67 | */
68 | // tslint:disable-next-line:no-any
69 | export function drawBoundingBox(keypoints, ctx) {
70 | const boundingBox = posenet.getBoundingBox(keypoints)
71 |
72 | ctx.rect(
73 | boundingBox.minX, boundingBox.minY, boundingBox.maxX - boundingBox.minX,
74 | boundingBox.maxY - boundingBox.minY)
75 |
76 | ctx.strokeStyle = boundingBoxColor
77 | ctx.stroke()
78 | }
--------------------------------------------------------------------------------
/pages/recorder/index.js:
--------------------------------------------------------------------------------
1 | // pages/recorder/index.js
2 | import pcmToPowerLevel from './utils';
3 | import * as echarts from '../../components/ec-canvas/echarts';
4 | const recorderManager = wx.getRecorderManager();
5 | let option = {
6 | backgroundColor: "#ffffff",
7 | color: ["#37A2DA", "#32C5E9", "#67E0E3"],
8 | series: [{
9 | type: 'gauge',
10 | startAngle: 200,
11 | endAngle: -20,
12 | splitNumber: 5,
13 | animation: false,
14 | detail: {
15 | formatter: '{value}%'
16 | },
17 | splitLine: { show: false },
18 | axisLine: {
19 | show: true,
20 | lineStyle: {
21 | width: 16,
22 | shadowBlur: 1,
23 | color: [
24 | [0.3, '#67e0e3'],
25 | [0.7, '#37a2da'],
26 | [1, '#fd666d']
27 | ]
28 | }
29 | },
30 | data: [{
31 | value: 0,
32 | name: '音量值',
33 | }]
34 | }]
35 | };
36 |
37 | let chart = null;
38 | const initChart = (canvas, width, height, dpr) => {
39 | chart = echarts.init(canvas, null, {
40 | width: width,
41 | height: height,
42 | devicePixelRatio: dpr // new
43 | });
44 | canvas.setChart(chart);
45 | chart.setOption(option, true);
46 | return chart;
47 | }
48 |
49 | Page({
50 |
51 | /**
52 | * Initial data for the page
53 | */
54 | data: {
55 | /**
56 | * start
57 | * stop
58 | * pause
59 | */
60 | buttonStatus: 'start',
61 | ec: {
62 | onInit: initChart
63 | }
64 | },
65 |
66 | /**
67 | * Start button tapped
68 | */
69 | handleClickStart() {
70 | const options = {
71 | duration: 60000,
72 | sampleRate: 16000,
73 | encodeBitRate: 96000,
74 | format: 'PCM',
75 | frameSize: 1
76 | };
77 | wx.getSetting({
78 | success: res => {
79 | if (res.authSetting['scope.record']) {
80 | recorderManager.start(options);
81 | this.setData({ buttonStatus: 'stop' });
82 | } else {
83 | wx.openSetting();
84 | }
85 | }
86 | });
87 | },
88 |
89 | /**
90 | * Continue button tapped
91 | */
92 | handleClickContinue() {
93 | recorderManager.resume();
94 | this.setData({ buttonStatus: 'stop' });
95 | },
96 |
97 | /**
98 | * Stop button tapped
99 | */
100 | handleClickStop() {
101 | recorderManager.stop();
102 | },
103 |
104 | /**
105 | * Lifecycle function: called when the page loads
106 | */
107 | onLoad: function (options) {
108 |
109 | },
110 |
111 | /**
112 | * Lifecycle function: called when the page's initial render has finished
113 | */
114 | onReady: function () {
115 |
116 | },
117 |
118 | /**
119 | * Lifecycle function: called when the page is shown
120 | */
121 | onShow: function () {
122 | wx.getSetting({
123 | success: res => {
124 | if (!res.authSetting['scope.record']) { wx.authorize({ scope: 'scope.record' }); }
125 | }
126 | });
127 |
128 | recorderManager.onPause(() => {
129 | this.setData({ buttonStatus: 'pause' });
130 | });
131 |
132 | recorderManager.onStop(() => {
133 | this.setData({ buttonStatus: 'start' }, () => {
134 | option.series[0].data[0].value = 0;
135 | chart.setOption(option, true);
136 | });
137 | });
138 |
139 | recorderManager.onFrameRecorded(res => {
140 | const { frameBuffer } = res;
141 | let result = pcmToPowerLevel(frameBuffer);
142 | option.series[0].data[0].value = result;
143 | chart.setOption(option, true);
144 | });
145 | },
146 |
147 | /**
148 | * Lifecycle function: called when the page is hidden
149 | */
150 | onHide: function () {
151 | recorderManager.pause();
152 | },
153 |
154 | /**
155 | * Lifecycle function: called when the page is unloaded
156 | */
157 | onUnload: function () {
158 | recorderManager.stop();
159 | },
160 |
161 | /**
162 | * Page event handler: called when the user pulls down to refresh
163 | */
164 | onPullDownRefresh: function () {
165 |
166 | },
167 |
168 | /**
169 | * Page event handler: called when the page is scrolled to the bottom
170 | */
171 | onReachBottom: function () {
172 |
173 | },
174 |
175 | /**
176 | * Called when the user taps Share in the top-right menu
177 | */
178 | onShareAppMessage: function () {
179 | return {
180 | title: '小程序实时解析声音',
181 | path: '/pages/recorder/index',
182 | imageUrl: '/static/img/share-img.png'
183 | }
184 | },
185 | onAddToFavorites() {
186 | return {
187 | title: '小程序实时解析声音',
188 | imageUrl: '/static/img/app-avatar.png'
189 | }
190 | },
191 | onShareTimeline() {
192 | return {
193 | title: '小程序实时解析声音',
194 | imageUrl: '/static/img/app-avatar.png'
195 | }
196 | }
197 | })
--------------------------------------------------------------------------------
/pages/recorder/index.json:
--------------------------------------------------------------------------------
1 | {
2 | "disableScroll": true,
3 | "usingComponents": {
4 | "ec-canvas": "/components/ec-canvas/ec-canvas"
5 | }
6 | }
--------------------------------------------------------------------------------
/pages/recorder/index.wxml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | 开始录音
8 | 继续录音
9 | 结束录音
10 |
--------------------------------------------------------------------------------
/pages/recorder/index.wxss:
--------------------------------------------------------------------------------
1 | /* pages/recorder/index.wxss */
2 | .container {
3 | display: flex;
4 | flex-direction: column;
5 | align-items: center;
6 | justify-content: center;
7 | height: 100vh;
8 | }
9 |
10 | .result {
11 | width: 100%;
12 | height: 50%;
13 | }
14 |
15 | .button {
16 | position: fixed;
17 | bottom: 40rpx;
18 | left: 40rpx;
19 | width: 670rpx;
20 | height: 100rpx;
21 | background: #ff6f00;
22 | border-radius: 12rpx;
23 | font-size: 32rpx;
24 | font-weight: 500;
25 | color: #FFFFFF;
26 | line-height: 100rpx;
27 | text-align: center;
28 | }
--------------------------------------------------------------------------------
/pages/recorder/utils.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Convert a chunk of PCM data into a volume percentage
3 | * Return value: 0-100, intended as a percentage; note: this is NOT decibels
4 | *
5 | * Volume calculation based on https://blog.csdn.net/jody1989/article/details/73480259
6 | * Higher-sensitivity approach: cap the sensed value at 10000. Linear curve: unfriendly to low volumes, power/10000*100
7 | * Logarithmic curve: friendly to low volumes, but needs a lower bound, (1+Math.log10(power/10000))*100
8 | *
9 | * @param frameBuffer a chunk of PCM frame data
10 | *
11 | */
12 | const pcmToPowerLevel = frameBuffer => {
13 | let level = 0;
14 | if (frameBuffer) {
15 | let arr = new Int16Array(frameBuffer);
16 | let pcmAbsSum = 0; // sum of the absolute values of all Int16 PCM samples
17 | if (arr && arr.length) {
18 | arr.map(ele => {
19 | pcmAbsSum = pcmAbsSum + Math.abs(ele);
20 | })
21 | }
22 | const pcmLength = frameBuffer.byteLength; // byte length of the PCM chunk
23 | let power = (pcmAbsSum / pcmLength) || 0; // the || 0 guards against NaN
24 | if (power < 1251) { // 1250 maps to 10%; quieter input uses the linear curve
25 | level = Math.round(power / 1250 * 10);
26 | } else {
27 | level = Math.round(Math.min(100, Math.max(0, (1 + Math.log(power / 10000) / Math.log(10)) * 100)));
28 | };
29 | }
30 | return level;
31 | };
32 | export default pcmToPowerLevel;
--------------------------------------------------------------------------------
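To make the linear/logarithmic split described in the header comment concrete, here is a small hypothetical check of pcmToPowerLevel with synthetic Int16 samples (a sketch, not part of the repo):

```js
import pcmToPowerLevel from './utils';

// Silence: all-zero samples give power 0 and stay on the linear branch.
const silence = new Int16Array(1600);
console.log(pcmToPowerLevel(silence.buffer)); // 0

// Quiet input: constant amplitude 2000 gives power 1000 (< 1251),
// so the linear curve applies: round(1000 / 1250 * 10) = 8.
const quiet = new Int16Array(1600).fill(2000);
console.log(pcmToPowerLevel(quiet.buffer)); // 8

// Loud input: amplitude 20000 gives power 10000, which takes the
// logarithmic branch and is clamped to the 0-100 range: 100.
const loud = new Int16Array(1600).fill(20000);
console.log(pcmToPowerLevel(loud.buffer)); // 100
```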
/project.config.json:
--------------------------------------------------------------------------------
1 | {
2 | "description": "Project configuration file",
3 | "packOptions": {
4 | "ignore": []
5 | },
6 | "setting": {
7 | "urlCheck": false,
8 | "es6": false,
9 | "enhance": true,
10 | "postcss": true,
11 | "preloadBackgroundData": false,
12 | "minified": true,
13 | "newFeature": true,
14 | "coverView": true,
15 | "nodeModules": true,
16 | "autoAudits": false,
17 | "showShadowRootInWxmlPanel": true,
18 | "scopeDataCheck": false,
19 | "uglifyFileName": false,
20 | "checkInvalidKey": true,
21 | "checkSiteMap": true,
22 | "uploadWithSourceMap": true,
23 | "compileHotReLoad": false,
24 | "useMultiFrameRuntime": true,
25 | "useApiHook": true,
26 | "useApiHostProcess": true,
27 | "babelSetting": {
28 | "ignore": [],
29 | "disablePlugins": [],
30 | "outputPath": ""
31 | },
32 | "enableEngineNative": false,
33 | "bundle": false,
34 | "useIsolateContext": true,
35 | "useCompilerModule": true,
36 | "userConfirmedUseCompilerModuleSwitch": false,
37 | "userConfirmedBundleSwitch": false,
38 | "packNpmManually": true,
39 | "packNpmRelationList": [
40 | {
41 | "packageJsonPath": "./package.json",
42 | "miniprogramNpmDistDir": "/"
43 | }
44 | ],
45 | "minifyWXSS": true
46 | },
47 | "compileType": "miniprogram",
48 | "libVersion": "2.9.0",
49 | "appid": "wx85cb68c407c84cc0",
50 | "projectname": "tensorflow-wxapp",
51 | "debugOptions": {
52 | "hidedInDevtools": []
53 | },
54 | "isGameTourist": false,
55 | "simulatorType": "wechat",
56 | "simulatorPluginLibVersion": {},
57 | "condition": {
58 | "plugin": {
59 | "list": []
60 | },
61 | "game": {
62 | "list": []
63 | },
64 | "gamePlugin": {
65 | "list": []
66 | },
67 | "miniprogram": {
68 | "list": []
69 | }
70 | }
71 | }
--------------------------------------------------------------------------------
/sitemap.json:
--------------------------------------------------------------------------------
1 | {
2 | "desc": "关于本文件的更多信息,请参考文档 https://developers.weixin.qq.com/miniprogram/dev/framework/sitemap.html",
3 | "rules": [{
4 | "action": "allow",
5 | "page": "*"
6 | }]
7 | }
--------------------------------------------------------------------------------
/static/img/app-avatar.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yaun369/tensorflow-wxapp/6db99cc9a08d222565f759192d39e0bf2d4f05d3/static/img/app-avatar.png
--------------------------------------------------------------------------------
/static/img/coco-ssd.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yaun369/tensorflow-wxapp/6db99cc9a08d222565f759192d39e0bf2d4f05d3/static/img/coco-ssd.png
--------------------------------------------------------------------------------
/static/img/posenet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yaun369/tensorflow-wxapp/6db99cc9a08d222565f759192d39e0bf2d4f05d3/static/img/posenet.png
--------------------------------------------------------------------------------
/static/img/powered-by-tensorflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yaun369/tensorflow-wxapp/6db99cc9a08d222565f759192d39e0bf2d4f05d3/static/img/powered-by-tensorflow.png
--------------------------------------------------------------------------------
/static/img/recorder.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yaun369/tensorflow-wxapp/6db99cc9a08d222565f759192d39e0bf2d4f05d3/static/img/recorder.png
--------------------------------------------------------------------------------
/static/img/share-img.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/yaun369/tensorflow-wxapp/6db99cc9a08d222565f759192d39e0bf2d4f05d3/static/img/share-img.png
--------------------------------------------------------------------------------
/static/svg/share-icon.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/static/svg/tensorflow-logo.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/utils/util.js:
--------------------------------------------------------------------------------
1 | export function getFrameSliceOptions(frameWidth, frameHeight, displayWidth, displayHeight) {
2 | let result = {
3 | start: [0, 0, 0], // slice offset into the frame tensor, [y, x, channel]
4 | size: [-1, -1, 3] // -1 keeps the full extent of an axis; 3 drops the alpha channel
5 | }
6 |
7 | const ratio = displayHeight / displayWidth // aspect ratio (height / width) of the display area
8 |
9 | if (ratio > frameHeight / frameWidth) { // display is taller than the frame: center-crop the frame's width
10 | result.start = [0, Math.ceil((frameWidth - Math.ceil(frameHeight / ratio)) / 2), 0]
11 | result.size = [-1, Math.ceil(frameHeight / ratio), 3]
12 | } else { // display is wider than the frame: center-crop the frame's height
13 | result.start = [Math.ceil((frameHeight - Math.floor(ratio * frameWidth)) / 2), 0, 0]
14 | result.size = [Math.ceil(ratio * frameWidth), -1, 3]
15 | }
16 |
17 | return result
18 | }
--------------------------------------------------------------------------------
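A worked example of getFrameSliceOptions (a sketch with made-up dimensions): a 640x480 camera frame shown on a 375x667 portrait display.

```js
import { getFrameSliceOptions } from '../../utils/util'; // as the pages import it

const opts = getFrameSliceOptions(640, 480, 375, 667);
// displayHeight / displayWidth ≈ 1.78 is larger than frameHeight / frameWidth = 0.75,
// so the frame's width is center-cropped:
//   opts.start -> [0, 185, 0]   (skip 185 columns on the left)
//   opts.size  -> [-1, 270, 3]  (keep all rows, 270 columns, RGB only)
// The 480x270 crop has roughly the display's aspect ratio and is then
// resized with resizeBilinear([displayHeight, displayWidth]) in the models.js files.
console.log(opts);
```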
/yarn.lock:
--------------------------------------------------------------------------------
1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
2 | # yarn lockfile v1
3 |
4 |
5 | "@tensorflow-models/coco-ssd@^2.1.0":
6 | version "2.1.0"
7 | resolved "https://registry.yarnpkg.com/@tensorflow-models/coco-ssd/-/coco-ssd-2.1.0.tgz#f6b133042dc9b427cd15ad67a8261e88e4143606"
8 | integrity sha512-OiNt6PkbLNaKXUS9Pp2USY236inZ5/aS6swkzqSiMztGF7ikctjTa98ZIMldG9yl6hNHZaH/in2ndxOGUQ6r5Q==
9 |
10 | "@tensorflow-models/posenet@^2.2.1":
11 | version "2.2.1"
12 | resolved "https://registry.yarnpkg.com/@tensorflow-models/posenet/-/posenet-2.2.1.tgz#2f0a68d909842f59eec195f23c6f3d4cebc53fa8"
13 | integrity sha512-n9/g6DfjAyrBTf/zt1haRCyWsgALxUCzg9/Ks3Y2mbYavRZVSCSTRPy/qlE5Hr4tLfyckGfDN14zmGTthNcg/g==
14 |
15 | "@tensorflow/tfjs-backend-cpu@2.7.0", "@tensorflow/tfjs-backend-cpu@^2.7.0":
16 | version "2.7.0"
17 | resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-2.7.0.tgz#f3a6c631279b0aab36c570e84343a014bc68da47"
18 | integrity sha512-R6ORcWq3ub81ABvBZEZ8Ok5OOT59B4AsRe66ds7B/NK0nN+k6y37bR3ZDVjgkEKNWNvzB7ydODikge3GNmgQIQ==
19 | dependencies:
20 | "@types/seedrandom" "2.4.27"
21 | seedrandom "2.4.3"
22 |
23 | "@tensorflow/tfjs-backend-webgl@^2.7.0":
24 | version "2.7.0"
25 | resolved "https://registry.yarnpkg.com/@tensorflow/tfjs-backend-webgl/-/tfjs-backend-webgl-2.7.0.tgz#e9899fd761fa38ab9914b1910e0bf38f5104a0c4"
26 | integrity sha512-K7Rk5YTSWOZ969EZvh3w786daPn2ub4mA2JsX7mXKhBPUaOP9dKbBdLj9buCuMcu4zVq2pAp0QwpHSa4PHm3xg==
27 | dependencies:
28 | "@tensorflow/tfjs-backend-cpu" "2.7.0"
29 | "@types/offscreencanvas" "~2019.3.0"
30 | "@types/seedrandom" "2.4.27"
31 | "@types/webgl-ext" "0.0.30"
32 | "@types/webgl2" "0.0.5"
33 | seedrandom "2.4.3"
34 |
35 | "@tensorflow/tfjs-converter@^2.7.0":
36 | version "2.7.0"
37 | resolved "https://registry.npm.taobao.org/@tensorflow/tfjs-converter/download/@tensorflow/tfjs-converter-2.7.0.tgz#24acdd8964b264ba4839f7a3466f8bacafb98c13"
38 | integrity sha1-JKzdiWSyZLpIOfejRm+LrK+5jBM=
39 |
40 | "@tensorflow/tfjs-core@^2.7.0":
41 | version "2.7.0"
42 | resolved "https://registry.npm.taobao.org/@tensorflow/tfjs-core/download/@tensorflow/tfjs-core-2.7.0.tgz#3bc24d360c388dbd87601b7823a4d7a1b67f324b"
43 | integrity sha1-O8JNNgw4jb2HYBt4I6TXobZ/Mks=
44 | dependencies:
45 | "@types/offscreencanvas" "~2019.3.0"
46 | "@types/seedrandom" "2.4.27"
47 | "@types/webgl-ext" "0.0.30"
48 | node-fetch "~2.6.1"
49 | seedrandom "2.4.3"
50 |
51 | "@types/offscreencanvas@~2019.3.0":
52 | version "2019.3.0"
53 | resolved "https://registry.npm.taobao.org/@types/offscreencanvas/download/@types/offscreencanvas-2019.3.0.tgz#3336428ec7e9180cf4566dfea5da04eb586a6553"
54 | integrity sha1-MzZCjsfpGAz0Vm3+pdoE61hqZVM=
55 |
56 | "@types/seedrandom@2.4.27":
57 | version "2.4.27"
58 | resolved "https://registry.npm.taobao.org/@types/seedrandom/download/@types/seedrandom-2.4.27.tgz#9db563937dd86915f69092bc43259d2f48578e41"
59 | integrity sha1-nbVjk33YaRX2kJK8QyWdL0hXjkE=
60 |
61 | "@types/webgl-ext@0.0.30":
62 | version "0.0.30"
63 | resolved "https://registry.npm.taobao.org/@types/webgl-ext/download/@types/webgl-ext-0.0.30.tgz#0ce498c16a41a23d15289e0b844d945b25f0fb9d"
64 | integrity sha1-DOSYwWpBoj0VKJ4LhE2UWyXw+50=
65 |
66 | "@types/webgl2@0.0.5":
67 | version "0.0.5"
68 | resolved "https://registry.yarnpkg.com/@types/webgl2/-/webgl2-0.0.5.tgz#dd925e20ab8ace80eb4b1e46fda5b109c508fb0d"
69 | integrity sha512-oGaKsBbxQOY5+aJFV3KECDhGaXt+yZJt2y/OZsnQGLRkH6Fvr7rv4pCt3SRH1somIHfej/c4u7NSpCyd9x+1Ow==
70 |
71 | "@vant/weapp@^1.6.1":
72 | version "1.6.1"
73 | resolved "https://registry.yarnpkg.com/@vant/weapp/-/weapp-1.6.1.tgz#01c6f395c0a67f56b8caae36a551d268b1fe0605"
74 | integrity sha512-HkQHodqmSPB0bTILo5DnM/avkpeSBfQHo42kN4HJa504SCNIc2DoF0VrAl/CaZB+2DMrElnEnM+//vf5rENtNQ==
75 |
76 | fetch-wechat@^0.0.3:
77 | version "0.0.3"
78 | resolved "https://registry.npm.taobao.org/fetch-wechat/download/fetch-wechat-0.0.3.tgz#2789c332a24bf9f4114b580c02d2934646e74f12"
79 | integrity sha1-J4nDMqJL+fQRS1gMAtKTRkbnTxI=
80 |
81 | node-fetch@~2.6.1:
82 | version "2.6.1"
83 | resolved "https://registry.npm.taobao.org/node-fetch/download/node-fetch-2.6.1.tgz?cache=0&sync_timestamp=1599309120224&other_urls=https%3A%2F%2Fregistry.npm.taobao.org%2Fnode-fetch%2Fdownload%2Fnode-fetch-2.6.1.tgz#045bd323631f76ed2e2b55573394416b639a0052"
84 | integrity sha1-BFvTI2Mfdu0uK1VXM5RBa2OaAFI=
85 |
86 | seedrandom@2.4.3:
87 | version "2.4.3"
88 | resolved "https://registry.npm.taobao.org/seedrandom/download/seedrandom-2.4.3.tgz#2438504dad33917314bff18ac4d794f16d6aaecc"
89 | integrity sha1-JDhQTa0zkXMUv/GKxNeU8W1qrsw=
90 |
--------------------------------------------------------------------------------