├── app
├── assets
│ ├── icon.png
│ └── splash.png
├── babel.config.js
├── .expo-shared
│ └── assets.json
├── .gitignore
├── tsconfig.json
├── README.md
├── app.json
├── package.json
├── components
│ ├── Server.tsx
│ ├── ProgressIndicator.tsx
│ └── Base64.tsx
└── App.tsx
├── .gitignore
├── server
├── requirements.txt
├── README.md
└── src
│ ├── script.js
│ ├── ps.py
│ └── main.py
├── LICENSE
└── README.md
/app/assets/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrildiagne/ar-cutpaste/HEAD/app/assets/icon.png
--------------------------------------------------------------------------------
/app/assets/splash.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cyrildiagne/ar-cutpaste/HEAD/app/assets/splash.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | venv
2 | .vscode
3 | .DS_Store
4 | .pyc
5 | node_modules
6 | __pycache__
7 | server/*.png
8 | server/*.jpg
9 | *.psd
--------------------------------------------------------------------------------
/app/babel.config.js:
--------------------------------------------------------------------------------
// Babel configuration for the Expo app.
module.exports = function(api) {
  // Cache the config permanently: it does not vary with the environment,
  // so Babel can skip re-evaluating this function on every build.
  api.cache(true);
  return {
    presets: ['babel-preset-expo'],
  };
};
7 |
--------------------------------------------------------------------------------
/server/requirements.txt:
--------------------------------------------------------------------------------
1 | screenpoint==0.1.1
2 | photoshop-connection==0.1.1
3 | Flask==1.1.1
4 | flask-cors==3.0.9
5 | pyscreenshot==1.0
6 | Pillow==8.3.2
7 | requests==2.23.0
--------------------------------------------------------------------------------
/app/.expo-shared/assets.json:
--------------------------------------------------------------------------------
1 | {
2 | "12bb71342c6255bbf50437ec8f4441c083f47cdb74bd89160c15e4f43e52a1cb": true,
3 | "40b842e832070c58deac6aa9e08fa459302ee3f9da492c7e77d93d2fbf4a56fd": true
4 | }
5 |
--------------------------------------------------------------------------------
/app/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/**/*
2 | .expo/*
3 | npm-debug.*
4 | *.jks
5 | *.p8
6 | *.p12
7 | *.key
8 | *.mobileprovision
9 | *.orig.*
10 | web-build/
11 | web-report/
12 |
13 | # macOS
14 | .DS_Store
15 |
--------------------------------------------------------------------------------
/app/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "allowSyntheticDefaultImports": true,
4 | "jsx": "react-native",
5 | "lib": ["dom", "esnext"],
6 | "moduleResolution": "node",
7 | "noEmit": true,
8 | "skipLibCheck": true,
9 | "resolveJsonModule": true,
10 | "strict": true
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/app/README.md:
--------------------------------------------------------------------------------
1 | # AR Cut Paste Mobile App
2 |
3 | An [Expo](https://expo.io) / [React Native](https://reactnative.dev) mobile application.
4 | Please follow instructions from the [expo website](https://expo.io/learn) to see how to preview the app on your phone using the Expo app.
5 |
6 | ## Setup
7 |
8 | ```bash
9 | npm install
10 | ```
11 |
12 | Then update the IP address in `components/Server.tsx` to point to the IP address of the computer running the local server:
13 | ```js
14 | 3: const URL = "http://192.168.1.29:8080";
15 | ```
16 |
17 | ## Run
18 |
19 | ```bash
20 | npm start
21 | ```
22 |
--------------------------------------------------------------------------------
/app/app.json:
--------------------------------------------------------------------------------
1 | {
2 | "expo": {
3 | "name": "app",
4 | "slug": "app",
5 | "platforms": [
6 | "ios",
7 | "android",
8 | "web"
9 | ],
10 | "version": "1.0.0",
11 | "orientation": "portrait",
12 | "icon": "./assets/icon.png",
13 | "splash": {
14 | "image": "./assets/splash.png",
15 | "resizeMode": "contain",
16 | "backgroundColor": "#ffffff"
17 | },
18 | "updates": {
19 | "fallbackToCacheTimeout": 0
20 | },
21 | "assetBundlePatterns": [
22 | "**/*"
23 | ],
24 | "ios": {
25 | "supportsTablet": true
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/server/README.md:
--------------------------------------------------------------------------------
1 | # AR Cut Paste local server
2 |
3 | ## Setup
4 |
5 | ```console
6 | virtualenv -p python3.7 venv
7 | source venv/bin/activate
8 | pip install -r requirements.txt
9 | ```
10 |
11 | ## Run
12 |
13 | The `BASNET_SERVICE_HOST` is optional (only needed if you've deployed the service
14 | on a platform using an ingress gateway such as Knative / Cloud Run).
15 |
16 | Replace `123456` by your Photoshop remote connection password.
17 |
18 | ```console
19 | python src/main.py \
20 | --basnet_service_ip="http://X.X.X.X" \
21 | --basnet_service_host="basnet-http.default.example.com" \
22 | --photoshop_password 123456
23 | ```
24 |
--------------------------------------------------------------------------------
/server/src/script.js:
--------------------------------------------------------------------------------
// Pastes the image at `filename` into the active Photoshop document as a new
// layer named `layerName`, translated by (x, y).
// Photoshop ExtendScript: `app`, `File`, `alert` and `ElementPlacement` are
// host-provided globals.
function pasteImage(filename, layerName, x, y) {
  var fileRef = new File(filename);
  var doc = app.activeDocument;

  // Create a fresh layer in the target document to receive the paste.
  doc.artLayers.add();
  // Open the source file, copy its entire contents, then close it.
  var curr_file = app.open(fileRef);
  curr_file.selection.selectAll();
  curr_file.selection.copy();
  curr_file.close();

  doc.paste();
  doc.activeLayer.name = layerName;
  // translate() moves the layer relative to where the paste placed it.
  doc.activeLayer.translate(x, y);
  try {
    // Reorder the pasted layer to sit just above the bottom-most layer
    // (doc.layers[length - 1] is the last layer in the stack).
    doc.activeLayer.move(doc.layers[doc.layers.length - 1], ElementPlacement.PLACEBEFORE);
  } catch(e) {
    alert(e);
  }
}
20 |
// Reads the active view's "globalBounds" (document top-left position on
// screen) from the application descriptor and shows it in an alert.
// NOTE(review): newer Photoshop versions reportedly no longer support
// executeActionGet over the remote connection (see server/src/ps.py), so
// this may always land in the catch branch — confirm before relying on it.
function getTopLeft() {
  try {
    var r = new ActionReference();
    // BUG FIX: the result of this chain was never assigned, so the
    // alert below referenced an undefined `t` and always threw.
    var t = executeActionGet(r)
      .getObjectValue(stringIDToTypeID("viewInfo"))
      .getObjectValue(stringIDToTypeID("activeView"))
      .getObjectValue(stringIDToTypeID("globalBounds"));
    alert(t);
  } catch (e) {
    alert(e);
  }
}
33 |
34 |
--------------------------------------------------------------------------------
/app/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "main": "node_modules/expo/AppEntry.js",
3 | "scripts": {
4 | "start": "expo start",
5 | "android": "expo start --android",
6 | "ios": "expo start --ios",
7 | "web": "expo start --web",
8 | "eject": "expo eject"
9 | },
10 | "dependencies": {
11 | "expo": "~37.0.3",
12 | "expo-2d-context": "0.0.2",
13 | "expo-asset": "~8.1.4",
14 | "expo-camera": "~8.2.0",
15 | "expo-gl": "~8.1.0",
16 | "expo-image-manipulator": "~8.1.0",
17 | "expo-permissions": "~8.1.0",
18 | "mem": "^4.0.0",
19 | "react": "~16.9.0",
20 | "react-dom": "~16.9.0",
21 | "react-native": "https://github.com/expo/react-native/archive/sdk-37.0.1.tar.gz",
22 | "react-native-screens": "~2.2.0",
23 | "react-native-svg": "11.0.1",
24 | "react-native-web": "~0.11.7"
25 | },
26 | "devDependencies": {
27 | "@babel/core": "^7.8.6",
28 | "@types/react": "~16.9.23",
29 | "@types/react-native": "~0.61.17",
30 | "babel-preset-expo": "~8.1.0",
31 | "typescript": "~3.8.3"
32 | },
33 | "private": true
34 | }
35 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Cyril Diagne
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/server/src/ps.py:
--------------------------------------------------------------------------------
from photoshop import PhotoshopConnection
from os.path import dirname, join

# TODO: This offset should be detected by getTopLeft() but the new version
# of Photoshop doesn't seem to support executeActionGet so we put it
# manually here in the meantime.
SCREEN_PIXELS_DENSITY = 2
DOC_OFFSET_X = 74 * SCREEN_PIXELS_DENSITY
DOC_OFFSET_Y = 130 * SCREEN_PIXELS_DENSITY
DOC_WIDTH = 2121
DOC_HEIGHT = 1280


def paste(filename, name, x, y, password='123456'):
    """Paste the image at `filename` into the active Photoshop document.

    Args:
        filename: Path of the image file to paste.
        name: Layer name for the pasted image.
        x, y: Target position in screen pixels; converted below to a
            translation from the document center.
        password: Photoshop remote-connection password.

    Returns:
        The Photoshop result dict when the script failed, or None on success.
    """
    # There seems to be a bug on Windows where the path must use unix
    # separators. https://github.com/cyrildiagne/ar-cutpaste/issues/5
    filename = filename.replace('\\', '/')

    with PhotoshopConnection(password=password) as conn:
        # Resolve script.js next to this module instead of relying on the
        # process working directory, and close the handle promptly.
        with open(join(dirname(__file__), 'script.js'), 'r') as f:
            script = f.read()
        # Convert screen coordinates into an offset from the doc center.
        x -= DOC_WIDTH * 0.5 + DOC_OFFSET_X
        y -= DOC_HEIGHT * 0.5 + DOC_OFFSET_Y
        # BUG FIX: interpolate the actual filename — it was previously a
        # hard-coded literal, so the `filename` argument was ignored.
        script += f'pasteImage("{filename}", "{name}", {x}, {y})'
        result = conn.execute(script)
        print(result)
        if result['status'] != 0:
            return result

    return None
--------------------------------------------------------------------------------
/app/components/Server.tsx:
--------------------------------------------------------------------------------
1 | import Base64 from "./Base64";
2 |
3 | const URL = "http://192.168.1.29:8080";
4 |
5 | function arrayBufferToBase64(buffer: ArrayBuffer) {
6 | let binary = "";
7 | const bytes = [].slice.call(new Uint8Array(buffer));
8 | bytes.forEach((b) => (binary += String.fromCharCode(b)));
9 | return Base64.btoa(binary);
10 | }
11 |
12 | function ping() {
13 | fetch(URL + "/ping").catch((e) => console.error(e));
14 | }
15 |
16 | async function cut(imageURI: string) {
17 | const formData = new FormData();
18 | formData.append("data", {
19 | uri: imageURI,
20 | name: "photo",
21 | type: "image/jpg",
22 | });
23 |
24 | const resp = await fetch(URL + "/cut", {
25 | method: "POST",
26 | body: formData,
27 | }).then(async (res) => {
28 | console.log("> converting...");
29 | const buffer = await res.arrayBuffer();
30 | const base64Flag = "data:image/png;base64,";
31 | const imageStr = arrayBufferToBase64(buffer);
32 | return base64Flag + imageStr;
33 | });
34 |
35 | return resp;
36 | }
37 |
38 | async function paste(imageURI: string) {
39 | const formData = new FormData();
40 | formData.append("data", {
41 | uri: imageURI,
42 | name: "photo",
43 | type: "image/jpg",
44 | });
45 |
46 | const resp = await fetch(URL + "/paste", {
47 | method: "POST",
48 | body: formData,
49 | }).then((r) => r.json());
50 |
51 | return resp;
52 | }
53 |
54 | export default {
55 | ping,
56 | cut,
57 | paste,
58 | };
59 |
--------------------------------------------------------------------------------
/app/components/ProgressIndicator.tsx:
--------------------------------------------------------------------------------
1 | // @refresh reset
2 |
3 | import React, { useState, useEffect } from "react";
4 | import { View, Animated, StyleSheet } from "react-native";
5 | import Svg, { Circle } from "react-native-svg";
6 |
7 | const AnimatedCircle = Animated.createAnimatedComponent(Circle);
8 |
9 | const numX = 4;
10 | const numY = 5;
11 | const total = numX * numY;
12 |
13 | const styles = StyleSheet.create({
14 | container: {
15 | ...StyleSheet.absoluteFillObject,
16 | alignItems: "center",
17 | justifyContent: "center",
18 | },
19 | });
20 | export default function ProgressIndicator() {
21 | const init = Array(total)
22 | .fill(1)
23 | .map((x) => ({ r: new Animated.Value(1), a: new Animated.Value(1) }));
24 | const [anim, setAnim] = useState(init);
25 |
26 | useEffect(() => {
27 | console.log("update");
28 | const c = anim.map((v, i: number) => {
29 | const t = 400 + Math.random() * 300;
30 | const seq = Animated.parallel([
31 | Animated.sequence([
32 | Animated.timing(anim[i].r, { toValue: 3, duration: t - 50 }),
33 | Animated.timing(anim[i].r, { toValue: 1, duration: t }),
34 | ]),
35 | Animated.sequence([
36 | Animated.timing(anim[i].a, { toValue: 0.1, duration: t - 50 }),
37 | Animated.timing(anim[i].a, { toValue: 1, duration: t }),
38 | ]),
39 | ]);
40 | return Animated.loop(seq);
41 | });
42 | // console.log(c)
43 | Animated.parallel(c).start();
44 | }, []);
45 |
46 | let circles = [];
47 | const margin = 100 / (numX);
48 | for (let x = 0; x < numX; x++) {
49 | for (let y = 0; y < numY; y++) {
50 | const i = y * numX + x;
51 | circles.push({
52 | x: (x + 0.5) * margin,
53 | y: (y) * margin,
54 | r: anim[i].r,
55 | a: anim[i].a,
56 | });
57 | }
58 | }
59 |
60 | return (
61 |
62 |
74 |
75 | );
76 | }
77 |
--------------------------------------------------------------------------------
/app/components/Base64.tsx:
--------------------------------------------------------------------------------
// https://stackoverflow.com/questions/42829838/react-native-atob-btoa-not-working-without-remote-js-debugging
// Minimal base64 encoder/decoder polyfill: React Native's JS runtime lacks
// the global atob/btoa when not attached to a remote debugger.
const chars =
  "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
const Base64 = {
  // Encode a Latin-1 string to base64. Like the native btoa, throws when a
  // character code is above 0xff.
  btoa: (input: string = "") => {
    let str = input;
    let output = "";

    // `i` advances by 3/4 per iteration: four base64 characters are emitted
    // for every three input bytes packed into `block`. When the input runs
    // out mid-group, `map` switches to "=" so the remainder emits padding.
    for (
      let block = 0, charCode, i = 0, map = chars;
      str.charAt(i | 0) || ((map = "="), i % 1);
      output += map.charAt(63 & (block >> (8 - (i % 1) * 8)))
    ) {
      charCode = str.charCodeAt((i += 3 / 4));

      if (charCode > 0xff) {
        throw new Error(
          "'btoa' failed: The string to be encoded contains characters outside of the Latin1 range."
        );
      }

      block = (block << 8) | charCode;
    }

    return output;
  },

  // Decode a base64 string. Like the native atob, throws on an invalid
  // length. Trailing "=" padding is stripped before decoding.
  atob: (input: string = "") => {
    let str = input.replace(/=+$/, "");
    let output = "";

    if (str.length % 4 == 1) {
      throw new Error(
        "'atob' failed: The string to be decoded is not correctly encoded."
      );
    }
    // Accumulate 6-bit values into `bs`; each iteration after the first of
    // every 4-character group emits one decoded byte.
    for (
      let bc = 0, bs = 0, buffer, i = 0;
      (buffer = str.charAt(i++));
      ~buffer && ((bs = bc % 4 ? bs * 64 + buffer : buffer), bc++ % 4)
        ? (output += String.fromCharCode(255 & (bs >> ((-2 * bc) & 6))))
        : 0
    ) {
      buffer = chars.indexOf(buffer);
    }

    return output;
  },
};
50 |
// Monkey-patch FileReader.readAsArrayBuffer, which is unimplemented in
// React Native: read the blob as a base64 data URL with a second FileReader,
// then decode it into an ArrayBuffer.
// NOTE(review): this relies on RN's private FileReader internals
// (_setReadyState, _result, _error) — may break across React Native
// versions; confirm against the installed RN release.
FileReader.prototype.readAsArrayBuffer = function (blob) {
  if (this.readyState === this.LOADING) throw new Error("InvalidStateError");
  this._setReadyState(this.LOADING);
  this._result = null;
  this._error = null;
  const fr = new FileReader();
  fr.onloadend = () => {
    // Strip the "data:...;base64," prefix, then base64-decode the payload.
    const content = Base64.atob(
      fr.result.substr(fr.result.indexOf(',') + 1)
    );
    // Copy the decoded binary string byte-for-byte into an ArrayBuffer.
    const buffer = new ArrayBuffer(content.length);
    const view = new Uint8Array(buffer);
    view.set(Array.from(content).map((c) => c.charCodeAt(0)));
    this._result = buffer;
    this._setReadyState(this.DONE);
  };
  fr.readAsDataURL(blob);
};
69 |
70 | // from: https://stackoverflow.com/questions/42829838/react-native-atob-btoa-not-working-without-remote-js-debugging
71 | // const chars =
72 | // "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";
73 | // const atob = (input = "") => {
74 | // let str = input.replace(/=+$/, "");
75 | // let output = "";
76 |
77 | // if (str.length % 4 == 1) {
78 | // throw new Error(
79 | // "'atob' failed: The string to be decoded is not correctly encoded."
80 | // );
81 | // }
82 | // for (
83 | // let bc = 0, bs = 0, buffer, i = 0;
84 | // (buffer = str.charAt(i++));
85 | // ~buffer && ((bs = bc % 4 ? bs * 64 + buffer : buffer), bc++ % 4)
86 | // ? (output += String.fromCharCode(255 & (bs >> ((-2 * bc) & 6))))
87 | // : 0
88 | // ) {
89 | // buffer = chars.indexOf(buffer);
90 | // }
91 |
92 | // return output;
93 | // };
94 |
95 | export default Base64;
96 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # AR Cut & Paste
2 |
3 | An AR+ML prototype that allows cutting elements from your surroundings and pasting them in an image editing software.
4 |
5 | Although only Photoshop is being handled currently, it may handle different outputs in the future.
6 |
7 | Demo & more info: [Thread](https://twitter.com/cyrildiagne/status/1256916982764646402)
8 |
9 | ⚠️ This is a research prototype and not a consumer / photoshop user tool.
10 |
11 | **Update 2020.05.11:** If you're looking for an easy to use app based on this research, head over to https://clipdrop.co
12 |
13 | ## Modules
14 |
15 | This prototype runs as 3 independent modules:
16 |
17 | - **The mobile app**
18 |
19 | - Check out the [/app](/app) folder for instructions on how to deploy the app to your mobile.
20 |
21 | - **The local server**
22 |
23 | - The interface between the mobile app and Photoshop.
24 | - It finds the position pointed on screen by the camera using [screenpoint](https://github.com/cyrildiagne/screenpoint)
25 | - Check out the [/server](/server) folder for instructions on configuring the local server
26 |
27 | - **The object detection / background removal service**
28 |
29 | - For now, the salience detection and background removal are delegated to an external service
30 | - It would be a lot simpler to use something like [DeepLab](https://github.com/shaqian/tflite-react-native) directly within the mobile app. But that hasn't been implemented in this repo yet.
31 |
32 | ## Usage
33 |
34 | ### 1 - Configure Photoshop
35 |
36 | - Go to "Preferences > Plug-ins", enable "Remote Connection" and set a friendly password that you'll need later.
37 | - Make sure that your PS document settings match those in ```server/src/ps.py```, otherwise only an empty layer will be pasted.
38 | - Also make sure that your document has some sort of background. If the background is just blank, SIFT will probably not have enough features to do a correct match.
39 |
40 |
48 |
49 | ### 2 - Setup the external salience object detection service
50 |
51 | #### Option 1: Set up your own model service (requires a CUDA GPU)
52 |
53 | - As mentioned above, for the time being, you must deploy the
54 | BASNet model (Qin & al, CVPR 2019) as an external HTTP service using this [BASNet-HTTP wrapper](https://github.com/cyrildiagne/basnet-http) (requires a CUDA GPU)
55 |
56 | - You will need the deployed service URL to configure the local server
57 |
58 | - Make sure to configure a different port if you're running BASNet on the same computer as the local service
59 |
60 | #### Option 2: Use a community provided endpoint
61 |
62 | A public endpoint has been provided by members of the community. This is useful if you don't have your own CUDA GPU or do not want to go through the process of running the service on your own.
63 |
64 | Use this endpoint by launching the local server with `--basnet_service_ip http://u2net-predictor.tenant-compass.global.coreweave.com`
65 |
66 | ### 3 - Configure and run the local server
67 |
68 | - Follow the instructions in [/server](/server) to setup & run the local server.
69 |
70 | ### 4 - Configure and run the mobile app
71 |
72 | - Follow the instructions in [/app](/app) to setup & deploy the mobile app.
73 |
74 | ## Thanks and Acknowledgements
75 |
76 | - [BASNet code](https://github.com/NathanUA/BASNet) for '[*BASNet: Boundary-Aware Salient Object Detection*](http://openaccess.thecvf.com/content_CVPR_2019/html/Qin_BASNet_Boundary-Aware_Salient_Object_Detection_CVPR_2019_paper.html) [code](https://github.com/NathanUA/BASNet)', [Xuebin Qin](https://webdocs.cs.ualberta.ca/~xuebin/), [Zichen Zhang](https://webdocs.cs.ualberta.ca/~zichen2/), [Chenyang Huang](https://chenyangh.com/), [Chao Gao](https://cgao3.github.io/), [Masood Dehghan](https://sites.google.com/view/masoodd) and [Martin Jagersand](https://webdocs.cs.ualberta.ca/~jag/)
77 | - RunwayML for the [Photoshop paste code](https://github.com/runwayml/RunwayML-for-Photoshop/blob/master/host/index.jsx)
78 | - [CoreWeave](https://www.coreweave.com) for hosting the public U^2Net model endpoint on Tesla V100s
79 |
--------------------------------------------------------------------------------
/app/App.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState, useEffect } from "react";
2 | import {
3 | Text,
4 | View,
5 | Image,
6 | TouchableWithoutFeedback,
7 | StyleSheet,
8 | } from "react-native";
9 | import * as ImageManipulator from "expo-image-manipulator";
10 | import { Camera } from "expo-camera";
11 |
12 | import ProgressIndicator from "./components/ProgressIndicator";
13 | import server from "./components/Server";
14 |
15 | const styles = StyleSheet.create({
16 | resultImgView: {
17 | position: "absolute",
18 | zIndex: 200,
19 | top: 0,
20 | left: 0,
21 | width: "100%",
22 | height: "100%",
23 | },
24 | resultImg: {
25 | position: "absolute",
26 | zIndex: 300,
27 | top: "25%",
28 | left: 0,
29 | width: "100%",
30 | height: "50%",
31 | },
32 | });
33 |
34 | interface State {
35 | hasPermission: boolean;
36 | type: any;
37 | camera: any;
38 | currImgSrc: string | null;
39 | }
40 |
41 | export default function App() {
42 | const [state, setState] = useState({
43 | hasPermission: false,
44 | type: Camera.Constants.Type.back,
45 | camera: null,
46 | currImgSrc: "",
47 | } as State);
48 |
49 | const [pressed, setPressed] = useState(false);
50 | const [pasting, setPasting] = useState(false);
51 |
52 | let camera: any = null;
53 |
54 | useEffect(() => {
55 | (async () => {
56 | // Ping the server on startup.
57 | server.ping();
58 | // Request permission.
59 | const { status } = await Camera.requestPermissionsAsync();
60 | const hasPermission = status === "granted" ? true : false;
61 | setState({ ...state, hasPermission });
62 | })();
63 | }, []);
64 |
65 | async function cut(): Promise {
66 | const start = Date.now();
67 | console.log("");
68 | console.log("Cut");
69 |
70 | console.log(camera.pictureSize);
71 | // const ratios = await camera.getSupportedRatiosAsync()
72 | // console.log(ratios)
73 | // const sizes = await camera.getAvailablePictureSizeAsync("2:1")
74 | // console.log(sizes)
75 |
76 | console.log("> taking image...");
77 | const opts = { skipProcessing: true, exif: false, quality: 0 };
78 | // const opts = {};
79 | let photo = await camera.takePictureAsync(opts);
80 |
81 | console.log("> resizing...");
82 | const { uri } = await ImageManipulator.manipulateAsync(
83 | photo.uri,
84 | [
85 | { resize: { width: 256, height: 512 } },
86 | { crop: { originX: 0, originY: 128, width: 256, height: 256 } },
87 | // { resize: { width: 256, height: 457 } },
88 | // { crop: { originX: 0, originY: 99, width: 256, height: 256 } },
89 | // { resize: { width: 256, height: 341 } },
90 | // { crop: { originX: 0, originY: 42, width: 256, height: 256 } },
91 | ]
92 | // { compress: 0, format: ImageManipulator.SaveFormat.JPEG, base64: false }
93 | );
94 |
95 | console.log("> sending to /cut...");
96 | const resp = await server.cut(uri);
97 |
98 | console.log(`Done in ${((Date.now() - start) / 1000).toFixed(3)}s`);
99 | return resp;
100 | }
101 |
102 | async function paste() {
103 | const start = Date.now();
104 | console.log("");
105 | console.log("Paste");
106 |
107 | console.log("> taking image...");
108 | // const opts = { skipProcessing: true, exif: false };
109 | const opts = {};
110 | let photo = await camera.takePictureAsync(opts);
111 |
112 | console.log("> resizing...");
113 | const { uri } = await ImageManipulator.manipulateAsync(photo.uri, [
114 | // { resize: { width: 512, height: 1024 } },
115 | { resize: { width: 350, height: 700 } },
116 | ]);
117 |
118 | console.log("> sending to /paste...");
119 | try {
120 | const resp = await server.paste(uri);
121 | if (resp.status !== "ok") {
122 | if (resp.status === "screen not found") {
123 | console.log("screen not found");
124 | } else {
125 | throw new Error(resp);
126 | }
127 | }
128 | } catch (e) {
129 | console.error("error pasting:", e);
130 | }
131 |
132 | console.log(`Done in ${((Date.now() - start) / 1000).toFixed(3)}s`);
133 | }
134 |
135 | async function onPressIn() {
136 | setPressed(true);
137 |
138 | const resp = await cut();
139 |
140 | // Check if we're still pressed.
141 | // if (pressed) {
142 | setState({ ...state, currImgSrc: resp });
143 | // }
144 | }
145 |
146 | async function onPressOut() {
147 | setPressed(false);
148 | setPasting(true);
149 |
150 | if (state.currImgSrc !== "") {
151 | await paste();
152 | setState({ ...state, currImgSrc: "" });
153 | setPasting(false);
154 | }
155 | }
156 |
157 | if (state.hasPermission === null) {
158 | return ;
159 | }
160 | if (state.hasPermission === false) {
161 | return No access to camera;
162 | }
163 |
164 | let camOpacity = 1;
165 | if (pressed && state.currImgSrc !== "") {
166 | camOpacity = 0.8;
167 | }
168 |
169 | return (
170 |
171 |
174 | (camera = ref)}
181 | >
182 |
183 |
190 |
191 |
192 |
193 | {pressed && state.currImgSrc !== "" ? (
194 | <>
195 |
196 |
201 |
202 | >
203 | ) : null}
204 |
205 | {(pressed && state.currImgSrc === "") || pasting ? : null}
206 |
207 | );
208 | }
209 |
--------------------------------------------------------------------------------
/server/src/main.py:
--------------------------------------------------------------------------------
1 | import io
2 | import os
3 | from flask import Flask, request, jsonify, send_file
4 | from flask_cors import CORS
5 | from PIL import Image
6 | import numpy as np
7 | import time
8 | import screenpoint
9 | from datetime import datetime
10 | import pyscreenshot
11 | import requests
12 | import logging
13 | import argparse
14 |
15 | import ps
16 |
17 | logging.basicConfig(level=logging.INFO)
18 |
19 | parser = argparse.ArgumentParser()
20 | parser.add_argument('--photoshop_password', default='123456')
21 | parser.add_argument('--basnet_service_ip', required=True, help="The BASNet service IP address")
22 | parser.add_argument('--basnet_service_host', help="Optional, the BASNet service host")
23 | args = parser.parse_args()
24 |
25 | max_view_size = 700
26 | max_screenshot_size = 400
27 |
28 | # Initialize the Flask application.
29 | app = Flask(__name__)
30 | CORS(app)
31 |
32 |
# Simple probe.
@app.route('/', methods=['GET'])
def hello():
    # Liveness check for the local server itself (no BASNet round-trip).
    return 'Hello AR Cut Paste!'
37 |
# Ping to wake up the BASNet service.
@app.route('/ping', methods=['GET'])
def ping():
    """Forward a GET to the BASNet service so a scale-to-zero deployment
    (e.g. Knative / Cloud Run) spins up before the first real request."""
    logging.info('ping')
    # Only send a Host override when one was configured — this matches the
    # explicit guard used by the /cut endpoint. (`requests` drops
    # None-valued headers, so behavior is unchanged; the intent is now
    # explicit and consistent.)
    headers = {}
    if args.basnet_service_host is not None:
        headers['Host'] = args.basnet_service_host
    r = requests.get(args.basnet_service_ip, headers=headers)
    logging.info(f'pong: {r.status_code} {r.content}')
    return 'pong'
45 |
46 |
# The /cut endpoint performs the salience detection / background removal
# and stores a copy of the result to be pasted later.
@app.route('/cut', methods=['POST'])
def save():
    """Cut out the salient object from the uploaded photo.

    Expects a multipart upload under the `data` key. Returns the composited
    cut-out as a PNG, or a JSON error payload with HTTP 400.
    """
    start = time.time()
    logging.info(' CUT')

    # Validate the multipart upload.
    if 'data' not in request.files:
        return jsonify({
            'status': 'error',
            'error': 'missing file param `data`'
        }), 400
    data = request.files['data'].read()
    if len(data) == 0:
        # BUG FIX: the key was previously the typo 'status:' (with a colon).
        return jsonify({'status': 'error', 'error': 'empty image'}), 400

    # Save debug locally.
    with open('cut_received.jpg', 'wb') as f:
        f.write(data)

    # Send to BASNet service.
    logging.info(' > sending to BASNet...')
    headers = {}
    if args.basnet_service_host is not None:
        headers['Host'] = args.basnet_service_host
    # Close the re-opened debug file once the request completes (it was
    # previously left open, leaking a file handle per request).
    with open('cut_received.jpg', 'rb') as f:
        res = requests.post(args.basnet_service_ip, headers=headers,
                            files={'data': f})

    # Save mask locally.
    logging.info(' > saving results...')
    with open('cut_mask.png', 'wb') as f:
        f.write(res.content)

    logging.info(' > opening mask...')
    mask = Image.open('cut_mask.png').convert("L")

    # Composite the original image over transparency using the mask.
    logging.info(' > compositing final image...')
    ref = Image.open(io.BytesIO(data))
    empty = Image.new("RGBA", ref.size, 0)
    img = Image.composite(ref, empty, mask)

    # TODO: currently a hack to manually scale up the images. Ideally this
    # would be done respective to the view distance from the screen.
    img_scaled = img.resize((img.size[0] * 3, img.size[1] * 3))

    # Save locally — this scaled copy is what /paste sends to Photoshop.
    logging.info(' > saving final image...')
    img_scaled.save('cut_current.png')

    # Return the unscaled composite to the mobile app.
    buff = io.BytesIO()
    img.save(buff, 'PNG')
    buff.seek(0)

    # Print stats
    logging.info(f'Completed in {time.time() - start:.2f}s')

    # Return data
    return send_file(buff, mimetype='image/png')
110 |
111 |
# The /paste endpoint handles new paste requests.
@app.route('/paste', methods=['POST'])
def paste():
    """Locate the phone's view on screen and paste the cut-out in Photoshop.

    Expects a multipart upload under the `data` key containing a photo of
    the screen. Returns a JSON status: 'ok', 'screen not found', or an
    error payload.
    """
    start = time.time()
    logging.info(' PASTE')

    # Validate the multipart upload.
    if 'data' not in request.files:
        return jsonify({
            'status': 'error',
            'error': 'missing file param `data`'
        }), 400
    data = request.files['data'].read()
    if len(data) == 0:
        # BUG FIX: the key was previously the typo 'status:' (with a colon).
        return jsonify({'status': 'error', 'error': 'empty image'}), 400

    # Save debug locally.
    with open('paste_received.jpg', 'wb') as f:
        f.write(data)

    # Convert string data to PIL Image.
    logging.info(' > loading image...')
    view = Image.open(io.BytesIO(data))

    # Ensure the view image size is under max_view_size.
    if view.size[0] > max_view_size or view.size[1] > max_view_size:
        view.thumbnail((max_view_size, max_view_size))

    # Take screenshot with pyscreenshot.
    logging.info(' > grabbing screenshot...')
    screen = pyscreenshot.grab()
    screen_width, screen_height = screen.size

    # Ensure screenshot is under max size.
    if screen.size[0] > max_screenshot_size or screen.size[1] > max_screenshot_size:
        screen.thumbnail((max_screenshot_size, max_screenshot_size))

    # Find the view centroid coordinates in screen space.
    logging.info(' > finding projected point...')
    view_arr = np.array(view.convert('L'))
    screen_arr = np.array(screen.convert('L'))
    x, y = screenpoint.project(view_arr, screen_arr, False)

    # screenpoint.project signals failure with (-1, -1).
    found = x != -1 and y != -1

    if found:
        # Scale back from the thumbnailed screenshot to full screen space.
        x = int(x / screen.size[0] * screen_width)
        y = int(y / screen.size[1] * screen_height)
        logging.info(f'{x}, {y}')

        # Paste the current image in Photoshop at these coordinates.
        logging.info(' > sending to photoshop...')
        name = datetime.today().strftime('%Y-%m-%d-%H:%M:%S')
        img_path = os.path.join(os.getcwd(), 'cut_current.png')
        err = ps.paste(img_path, name, x, y, password=args.photoshop_password)
        if err is not None:
            logging.error('error sending to photoshop')
            logging.error(err)
            # BUG FIX: this error response was previously built but never
            # returned, so the client saw 'ok' even when the paste failed.
            return jsonify({'status': 'error sending to photoshop'}), 500
    else:
        logging.info('screen not found')

    # Print stats.
    logging.info(f'Completed in {time.time() - start:.2f}s')

    # Return status.
    if found:
        return jsonify({'status': 'ok'})
    else:
        return jsonify({'status': 'screen not found'})
184 |
185 |
186 | if __name__ == '__main__':
187 | os.environ['FLASK_ENV'] = 'development'
188 | port = int(os.environ.get('PORT', 8080))
189 | app.run(debug=True, host='0.0.0.0', port=port)
190 |
--------------------------------------------------------------------------------