├── Sign_Language_Translation
│   ├── __pycache__
│   │   └── unicode.cpython-38.pyc
│   ├── create_dataset_from_video.py
│   ├── making_video.py
│   ├── modules
│   │   ├── __pycache__
│   │   │   ├── holistic_module.cpython-38.pyc
│   │   │   └── utils.cpython-38.pyc
│   │   ├── holistic_module.py
│   │   └── utils.py
│   ├── readme.md
│   ├── show_video.py
│   ├── train_hand_gesture.ipynb
│   ├── unicode.py
│   ├── video_test_model_tflite.py
│   └── webcam_test_model_tflite.py
├── dataset
│   ├── output_video
│   │   ├── ㄱ
│   │   │   └── ㄱ_1.avi
│   │   ├── ㄴ
│   │   │   └── ㄴ_1.avi
│   │   ├── ㄷ
│   │   │   └── ㄷ_1.avi
│   │   ├── ㄹ
│   │   │   └── ㄹ_1.avi
│   │   ├── ㅁ
│   │   │   └── ㅁ_1.avi
│   │   ├── ㅂ
│   │   │   └── ㅂ_1.avi
│   │   ├── ㅅ
│   │   │   └── ㅅ_1.avi
│   │   ├── ㅇ
│   │   │   └── ㅇ_1.avi
│   │   ├── ㅈ
│   │   │   └── ㅈ_1.avi
│   │   ├── ㅊ
│   │   │   └── ㅊ_1.avi
│   │   ├── ㅋ
│   │   │   └── ㅋ_1.avi
│   │   ├── ㅌ
│   │   │   └── ㅌ_1.avi
│   │   ├── ㅍ
│   │   │   └── ㅍ_1.avi
│   │   ├── ㅎ
│   │   │   └── ㅎ_1.avi
│   │   ├── ㅏ
│   │   │   └── ㅏ_1.avi
│   │   ├── ㅐ
│   │   │   └── ㅐ_1.avi
│   │   ├── ㅑ
│   │   │   └── ㅑ_1.avi
│   │   ├── ㅒ
│   │   │   └── ㅒ_1.avi
│   │   ├── ㅓ
│   │   │   └── ㅓ_1.avi
│   │   ├── ㅔ
│   │   │   └── ㅔ_1.avi
│   │   ├── ㅕ
│   │   │   └── ㅕ_1.avi
│   │   ├── ㅖ
│   │   │   └── ㅖ_1.avi
│   │   ├── ㅗ
│   │   │   └── ㅗ_1.avi
│   │   ├── ㅚ
│   │   │   └── ㅚ_1.avi
│   │   ├── ㅛ
│   │   │   └── ㅛ_1.avi
│   │   ├── ㅜ
│   │   │   └── ㅜ_1.avi
│   │   ├── ㅟ
│   │   │   └── ㅟ_1.avi
│   │   ├── ㅠ
│   │   │   └── ㅠ_1.avi
│   │   ├── ㅡ
│   │   │   └── ㅡ_1.avi
│   │   ├── ㅢ
│   │   │   └── ㅢ_1.avi
│   │   └── ㅣ
│   │       └── ㅣ_1.avi
│   ├── seq_ㄱ_1669720403.npy
│   ├── seq_ㄱ_1669723415.npy
│   ├── seq_ㄱ_1669724266.npy
│   ├── seq_ㄴ_1669720403.npy
│   ├── seq_ㄴ_1669723415.npy
│   ├── seq_ㄴ_1669724266.npy
│   ├── seq_ㄷ_1669720403.npy
│   ├── seq_ㄷ_1669723415.npy
│   ├── seq_ㄷ_1669724266.npy
│   ├── seq_ㄹ_1669720403.npy
│   ├── seq_ㄹ_1669723415.npy
│   ├── seq_ㄹ_1669724266.npy
│   ├── seq_ㅁ_1669720403.npy
│   ├── seq_ㅁ_1669723415.npy
│   ├── seq_ㅁ_1669724266.npy
│   ├── seq_ㅂ_1669720403.npy
│   ├── seq_ㅂ_1669723415.npy
│   ├── seq_ㅂ_1669724266.npy
│   ├── seq_ㅅ_1669720403.npy
│   ├── seq_ㅅ_1669723415.npy
│   ├── seq_ㅅ_1669724266.npy
│   ├── seq_ㅇ_1669720403.npy
│   ├── seq_ㅇ_1669723415.npy
│   ├── seq_ㅇ_1669724266.npy
│   ├── seq_ㅈ_1669720403.npy
│   ├── seq_ㅈ_1669723415.npy
│   ├── seq_ㅈ_1669724266.npy
│   ├── seq_ㅊ_1669720403.npy
│   ├── seq_ㅊ_1669723415.npy
│   ├── seq_ㅊ_1669724266.npy
│   ├── seq_ㅋ_1669720403.npy
│   ├── seq_ㅋ_1669723415.npy
│   ├── seq_ㅋ_1669724266.npy
│   ├── seq_ㅌ_1669720403.npy
│   ├── seq_ㅌ_1669723415.npy
│   ├── seq_ㅌ_1669724266.npy
│   ├── seq_ㅍ_1669720403.npy
│   ├── seq_ㅍ_1669723415.npy
│   ├── seq_ㅍ_1669724266.npy
│   ├── seq_ㅎ_1669720403.npy
│   ├── seq_ㅎ_1669723415.npy
│   ├── seq_ㅎ_1669724266.npy
│   ├── seq_ㅏ_1669720403.npy
│   ├── seq_ㅏ_1669723415.npy
│   ├── seq_ㅏ_1669724266.npy
│   ├── seq_ㅐ_1669720403.npy
│   ├── seq_ㅐ_1669723415.npy
│   ├── seq_ㅐ_1669724266.npy
│   ├── seq_ㅑ_1669720403.npy
│   ├── seq_ㅑ_1669723415.npy
│   ├── seq_ㅑ_1669724266.npy
│   ├── seq_ㅒ_1669720403.npy
│   ├── seq_ㅒ_1669723415.npy
│   ├── seq_ㅒ_1669724266.npy
│   ├── seq_ㅓ_1669720403.npy
│   ├── seq_ㅓ_1669723415.npy
│   ├── seq_ㅓ_1669724266.npy
│   ├── seq_ㅔ_1669720403.npy
│   ├── seq_ㅔ_1669723415.npy
│   ├── seq_ㅔ_1669724266.npy
│   ├── seq_ㅕ_1669720403.npy
│   ├── seq_ㅕ_1669723415.npy
│   ├── seq_ㅕ_1669724266.npy
│   ├── seq_ㅖ_1669720403.npy
│   ├── seq_ㅖ_1669723415.npy
│   ├── seq_ㅖ_1669724266.npy
│   ├── seq_ㅗ_1669720403.npy
│   ├── seq_ㅗ_1669723415.npy
│   ├── seq_ㅗ_1669724266.npy
│   ├── seq_ㅚ_1669720403.npy
│   ├── seq_ㅚ_1669723415.npy
│   ├── seq_ㅚ_1669724266.npy
│   ├── seq_ㅛ_1669720403.npy
│   ├── seq_ㅛ_1669723415.npy
│   ├── seq_ㅛ_1669724266.npy
│   ├── seq_ㅜ_1669720403.npy
│   ├── seq_ㅜ_1669723415.npy
│   ├── seq_ㅜ_1669724266.npy
│   ├── seq_ㅟ_1669720403.npy
│   ├── seq_ㅟ_1669723415.npy
│   ├── seq_ㅟ_1669724266.npy
│   ├── seq_ㅠ_1669720403.npy
│   ├── seq_ㅠ_1669723415.npy
│   ├── seq_ㅠ_1669724266.npy
│   ├── seq_ㅡ_1669720403.npy
│   ├── seq_ㅡ_1669723415.npy
│   ├── seq_ㅡ_1669724266.npy
│   ├── seq_ㅢ_1669720403.npy
│   ├── seq_ㅢ_1669723415.npy
│   ├── seq_ㅢ_1669724266.npy
│   ├── seq_ㅣ_1669720403.npy
│   ├── seq_ㅣ_1669723415.npy
│   └── seq_ㅣ_1669724266.npy
├── models
│   ├── multi_hand_gesture_classifier.h5
│   └── multi_hand_gesture_classifier.tflite
├── readme.md
├── version_requirements.txt
└── 실시간 수화 번역 인식 모듈 생성.pptx

--------------------------------------------------------------------------------
/Sign_Language_Translation/create_dataset_from_video.py:
--------------------------------------------------------------------------------
import cv2
import sys, os
import mediapipe as mp
import numpy as np
import modules.holistic_module as hm
from modules.utils import createDirectory
import json
import time

from modules.utils import Vector_Normalization

createDirectory('dataset/output_video')

# Name of the file to save
save_file_name = "train"

# Sequence length (30 -> 10)
seq_length = 10


actions = ['ㄱ', 'ㄴ', 'ㄷ', 'ㄹ', 'ㅁ', 'ㅂ', 'ㅅ', 'ㅇ', 'ㅈ', 'ㅊ', 'ㅋ', 'ㅌ', 'ㅍ', 'ㅎ',
           'ㅏ', 'ㅑ', 'ㅓ', 'ㅕ', 'ㅗ', 'ㅛ', 'ㅜ', 'ㅠ', 'ㅡ', 'ㅣ',
           'ㅐ', 'ㅒ', 'ㅔ', 'ㅖ', 'ㅢ', 'ㅚ', 'ㅟ']

dataset = dict()

for i in range(len(actions)):
    dataset[i] = []

# MediaPipe holistic model
detector = hm.HolisticDetector(min_detection_confidence=0.3)

videoFolderPath = "dataset/output_video"
videoTestList = os.listdir(videoFolderPath)

testTargetList = []

created_time = int(time.time())

for videoPath in videoTestList:
    actionVideoPath = f'{videoFolderPath}/{videoPath}'
    actionVideoList = os.listdir(actionVideoPath)
    for actionVideo in actionVideoList:
        fullVideoPath = f'{actionVideoPath}/{actionVideo}'
        testTargetList.append(fullVideoPath)

print("---------- Start Video List ----------")
testTargetList = sorted(testTargetList, key=lambda x: x[x.find("/", 9)+1], reverse=True)
print(testTargetList)
print("---------- End Video List ----------\n")

for target in testTargetList:

    data = []
    first_index = target.find("/")
    second_index = target.find("/", first_index+1)
    third_index = target.find("/", second_index+1)
    idx = actions.index(target[target.find("/", second_index)+1:target.find("/", third_index)])

    print("Now Streaming :", target)
    cap = cv2.VideoCapture(target)

    # Check that the video opened
    if not cap.isOpened():
        print("Camera open failed!")
        sys.exit()

    # Read the video properties
    # round() to get integer values
    w = round(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    h = round(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fps = cap.get(cv2.CAP_PROP_FPS)  # may or may not be valid depending on the camera
    print(w, h, 'fps : ', fps)

    if fps != 0:
        delay = round(1000/fps)
    else:
        delay = round(1000/30)

    # Read frames and collect features
    while True:
        ret, img = cap.read()

        if not ret:
            break

        img = detector.findHolistic(img, draw=True)
        # _, left_hand_lmList = detector.findLefthandLandmark(img)
        _, right_hand_lmList = detector.findRighthandLandmark(img)


        # if len(left_hand_lmList) != 0 and len(right_hand_lmList) != 0:
        # if left_hand_lmList is not None or right_hand_lmList is not None:
        if right_hand_lmList is not None:
            joint = np.zeros((42, 2))  # (21,2)

            # Left-hand landmark list
            # for j, lm in enumerate(left_hand_lmList.landmark):
            #     joint[j] = [lm.x, lm.y]

            # Right-hand landmark list
            for j, lm in enumerate(right_hand_lmList.landmark):
                # joint[j+21] = [lm.x, lm.y]
                joint[j] = [lm.x, lm.y]

            # Vector normalization
            vector, angle_label = Vector_Normalization(joint)

            # Attach the ground-truth label
            angle_label = np.append(angle_label, idx)

            # Save position-dependent data
            # d = np.concatenate([joint.flatten(), angle_label])

            # Remove position dependency via vector normalization
            d = np.concatenate([vector.flatten(), angle_label.flatten()])

            data.append(d)



        # draw box
        # cv2.rectangle(img, (0,0), (w, 30), (245, 117, 16), -1)

        # draw text target name
        # cv2.putText(img, target, (15,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)


        # cv2.imshow('img', img)

        # Press ESC to quit early
        if cv2.waitKey(delay) == 27:
            break

    print("\n---------- Finish Video Streaming ----------")

    data = np.array(data)

    # Create sequence data
    print('len(data)-seq_length:', len(data) - seq_length)
    for seq in range(len(data) - seq_length):
        dataset[idx].append(data[seq:seq + seq_length])

'''
for i in range(len(actions)):
    save_data = np.array(dataset[i])
    np.save(os.path.join('dataset', f'seq_{actions[i]}_{created_time}'), save_data)


print("\n---------- Finish Save Dataset ----------")
'''
--------------------------------------------------------------------------------
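For reference, the sliding-window step at the end of create_dataset_from_video.py turns each video's per-frame feature rows into overlapping sequences. A minimal sketch of the resulting shapes, assuming a clip that yielded 100 feature rows (40 normalized-vector components + 15 angles + 1 label = 56 values per frame):

```python
import numpy as np

seq_length = 10
data = np.zeros((100, 56))  # 100 frames x 56 features, as built above

# Same windowing as the script: every run of 10 consecutive frames is one sample.
windows = np.array([data[s:s + seq_length] for s in range(len(data) - seq_length)])
print(windows.shape)  # (90, 10, 56)
```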
/Sign_Language_Translation/making_video.py:
--------------------------------------------------------------------------------
import cv2
import sys, os
import time
import mediapipe as mp
from modules.utils import createDirectory
import numpy as np
from PIL import ImageFont, ImageDraw, Image

fontpath = "fonts/HMKMMAG.TTF"
font = ImageFont.truetype(fontpath, 40)

createDirectory('dataset')

# Recorded in batches to rest sore fingers
# actions = ['ㄱ', 'ㄴ', 'ㄷ', 'ㄹ', 'ㅁ', 'ㅂ', 'ㅅ', 'ㅇ', 'ㅈ', 'ㅊ', 'ㅋ', 'ㅌ', 'ㅍ', 'ㅎ']
# actions = ['ㅏ', 'ㅑ', 'ㅓ', 'ㅕ', 'ㅗ', 'ㅛ', 'ㅜ', 'ㅠ', 'ㅡ', 'ㅣ']
# actions = ['ㅗ', 'ㅛ', 'ㅜ']
actions = ['ㅐ', 'ㅒ', 'ㅔ', 'ㅖ', 'ㅢ', 'ㅚ', 'ㅟ']
# seq_length = 10
secs_for_action = 30

# MediaPipe hands model
mp_hands = mp.solutions.hands
mp_drawing = mp.solutions.drawing_utils
hands = mp_hands.Hands(
    max_num_hands=1,
    min_detection_confidence=0.3,
    min_tracking_confidence=0.5)

cap = cv2.VideoCapture(0)

created_time = int(time.time())


# Check that the camera opened
if not cap.isOpened():
    print("Camera open failed!")
    sys.exit()

# Read the webcam properties
# round() to get integer values
w = round(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
h = round(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = cap.get(cv2.CAP_PROP_FPS)  # may or may not be valid depending on the camera

if fps != 0:
    delay = round(1000/fps)
else:
    delay = round(1000/30)

fourcc = cv2.VideoWriter_fourcc(*'DIVX')


# Capture frames and record them
while cap.isOpened():
    for idx, action in enumerate(actions):

        os.makedirs(f'dataset/output_video/{action}', exist_ok=True)

        videoFolderPath = f'dataset/output_video/{action}'
        videoList = sorted(os.listdir(videoFolderPath), key=lambda x: int(x[x.find("_")+1:x.find(".")]))

        if len(videoList) == 0:
            take = 1
        else:
            f = videoList[-1].find("_")
            e = videoList[-1].find(".")
            take = int(videoList[-1][f+1:e]) + 1

        saved_video_path = f'dataset/output_video/{action}/{action}_{take}.avi'

        out = cv2.VideoWriter(saved_video_path, fourcc, fps, (w, h))

        ret, img = cap.read()
        if not ret:
            break

        # Render Korean text ("waiting for input")
        img_pil = Image.fromarray(img)
        draw = ImageDraw.Draw(img_pil)
        draw.text((10, 30), f'{action.upper()} 입력 대기중..', font=font, fill=(255, 255, 255))
        img = np.array(img_pil)

        # cv2.putText(img, f'Waiting for collecting {action.upper()} action...', org=(10, 30), fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(255, 255, 255), thickness=2)
        cv2.imshow('img', img)
        cv2.waitKey(4000)

        start_time = time.time()

        while time.time() - start_time < secs_for_action:
            ret, img = cap.read()
            if not ret:
                break

            # Record the video
            out.write(img)

            img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
            result = hands.process(img)
            img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)

            if result.multi_hand_landmarks is not None:
                for res in result.multi_hand_landmarks:
                    mp_drawing.draw_landmarks(img, res, mp_hands.HAND_CONNECTIONS)

            cv2.imshow('img', img)

            # Press ESC to quit early
            if cv2.waitKey(delay) == 27:
                break

    cap.release()
    out.release()
    cv2.destroyAllWindows()
--------------------------------------------------------------------------------
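The take counter in making_video.py is derived purely from the existing filenames. A small illustration of that parsing, using a hypothetical directory listing; the numeric sort key matters, since a plain lexicographic sort would place ㄱ_10.avi before ㄱ_2.avi and compute the wrong next take:

```python
videoList = sorted(['ㄱ_10.avi', 'ㄱ_1.avi', 'ㄱ_2.avi'],  # hypothetical listing
                   key=lambda x: int(x[x.find("_")+1:x.find(".")]))
f = videoList[-1].find("_")
e = videoList[-1].find(".")
take = int(videoList[-1][f+1:e]) + 1  # next take number
print(videoList, take)  # ['ㄱ_1.avi', 'ㄱ_2.avi', 'ㄱ_10.avi'] 11
```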
/Sign_Language_Translation/modules/holistic_module.py:
--------------------------------------------------------------------------------
import cv2
import mediapipe as mp
import time
import math

class HolisticDetector():
    def __init__(self,
                 static_image_mode=False,
                 model_complexity=1,
                 smooth_landmarks=True,
                 enable_segmentation=False,
                 smooth_segmentation=True,
                 refine_face_landmarks=False,
                 min_detection_confidence=0.5,
                 min_tracking_confidence=0.5):
        self.static_image_mode = static_image_mode
        self.model_complexity = model_complexity
        self.smooth_landmarks = smooth_landmarks
        self.enable_segmentation = enable_segmentation
        self.smooth_segmentation = smooth_segmentation
        self.refine_face_landmarks = refine_face_landmarks
        self.min_detection_confidence = min_detection_confidence
        self.min_tracking_confidence = min_tracking_confidence

        self.mpHolistic = mp.solutions.holistic
        self.mpPose = mp.solutions.pose
        self.mpFace = mp.solutions.face_mesh
        self.holistics = self.mpHolistic.Holistic(self.static_image_mode,
                                                  self.model_complexity,
                                                  self.smooth_landmarks,
                                                  self.enable_segmentation,
                                                  self.smooth_segmentation,
                                                  self.refine_face_landmarks,
                                                  self.min_detection_confidence,
                                                  self.min_tracking_confidence)
        self.mpDraw = mp.solutions.drawing_utils

        self.tipIds = [4, 8, 12, 16, 20]

    def findHolistic(self, img, draw=True):
        imgRGB = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        self.results = self.holistics.process(imgRGB)

        if self.results.pose_landmarks:

            if draw:
                # Draw pose, left and right hands, and face landmarks on the image.
                annotated_image = img.copy()

                # self.mpDraw.draw_landmarks(
                #     annotated_image, self.results.face_landmarks, self.mpHolistic.FACE_CONNECTIONS)
                self.mpDraw.draw_landmarks(
                    annotated_image, self.results.left_hand_landmarks, self.mpHolistic.HAND_CONNECTIONS)
                self.mpDraw.draw_landmarks(
                    annotated_image, self.results.right_hand_landmarks, self.mpHolistic.HAND_CONNECTIONS)
                # self.mpDraw.draw_landmarks(
                #     annotated_image, self.results.pose_landmarks, self.mpHolistic.POSE_CONNECTIONS)

                # Plot pose world landmarks.
                # self.mpDraw.plot_landmarks(
                #     self.results.pose_world_landmarks, self.mpHolistic.POSE_CONNECTIONS)
                return annotated_image

        return img

    def findPoseLandmark(self, img, draw=True):
        xList = []
        yList = []

        self.pose_lmList = []
        if self.results.pose_landmarks:
            myHolistic = self.results.pose_landmarks
            # print(myHolistic.landmark)
            # print(type(myHolistic.landmark))
            for id, lm in enumerate(myHolistic.landmark):
                # print(id, lm)
                h, w, c = img.shape
                cx, cy, cz = int(lm.x*w), int(lm.y*h), int(lm.z*(w+h)/2)
                # print(id, cx, cy)
                # print(cz)
                xList.append(cx)
                yList.append(cy)
                self.pose_lmList.append([id, cx, cy, cz])

        return self.pose_lmList

    def findFaceLandmark(self, img, draw=True):
        xList = []
        yList = []

        self.face_lmList = []
        if self.results.face_landmarks:
            myHolistic = self.results.face_landmarks
            # print(type(myHolistic.landmark))
            for id, lm in enumerate(myHolistic.landmark):
                # print(id, lm)
                h, w, c = img.shape
                cx, cy, cz = int(lm.x*w), int(lm.y*h), int(lm.z*(w+h)/2)
                # print(id, cx, cy)
                xList.append(cx)
                yList.append(cy)
                self.face_lmList.append([id, cx, cy, cz])

        return self.face_lmList

    def findLefthandLandmark(self, img, draw=True):
        xList = []
        yList = []

        self.left_hand_lmList = []
        if self.results.left_hand_landmarks:
            myHolistic = self.results.left_hand_landmarks

            for id, lm in enumerate(myHolistic.landmark):
                h, w, c = img.shape
                cx, cy, cz = int(lm.x*w), int(lm.y*h), int(lm.z*(w+h)/2)
                xList.append(cx)
                yList.append(cy)
                self.left_hand_lmList.append([id, cx, cy, cz])

        return self.left_hand_lmList, self.results.left_hand_landmarks

    def findRighthandLandmark(self, img, draw=True):
        xList = []
        yList = []

        self.right_hand_lmList = []
        if self.results.right_hand_landmarks:
            myHolistic = self.results.right_hand_landmarks
            for id, lm in enumerate(myHolistic.landmark):
                h, w, c = img.shape
                cx, cy, cz = int(lm.x*w), int(lm.y*h), int(lm.z*(w+h)/2)
                xList.append(cx)
                yList.append(cy)
                self.right_hand_lmList.append([id, cx, cy, cz])

        return self.right_hand_lmList, self.results.right_hand_landmarks

    def left_hand_fingersUp(self, axis=False):
        fingers = []

        if axis == False:
            # Thumb
            if self.left_hand_lmList[self.tipIds[0]][1] < self.left_hand_lmList[self.tipIds[4]][1]:
                if self.left_hand_lmList[self.tipIds[0]][1] < self.left_hand_lmList[self.tipIds[0] - 2][1]:
                    fingers.append(1)
                else:
                    fingers.append(0)
            elif self.left_hand_lmList[self.tipIds[0]][1] > self.left_hand_lmList[self.tipIds[4]][1]:
                if self.left_hand_lmList[self.tipIds[0]][1] > self.left_hand_lmList[self.tipIds[0] - 2][1]:
                    fingers.append(1)
                else:
                    fingers.append(0)

            # Fingers except thumb
            for id in range(1, 5):
                if self.left_hand_lmList[self.tipIds[id]][2] < self.left_hand_lmList[self.tipIds[id]-2][2]:
                    fingers.append(1)
                else:
                    fingers.append(0)

        # axis = True (to detect the LIKE gesture)
        else:
            # Thumb
            if self.left_hand_lmList[self.tipIds[0]][2] < self.left_hand_lmList[self.tipIds[0] - 2][2]:
                fingers.append(1)
            else:
                fingers.append(0)

            # Fingers except thumb
            if self.left_hand_lmList[self.tipIds[0]][1] < self.left_hand_lmList[self.tipIds[4]][1]:
                for id in range(1, 5):
                    if self.left_hand_lmList[self.tipIds[id]][1] > self.left_hand_lmList[self.tipIds[id]-2][1]:
                        fingers.append(1)
                    else:
                        fingers.append(0)
            else:
                for id in range(1, 5):
                    if self.left_hand_lmList[self.tipIds[id]][1] < self.left_hand_lmList[self.tipIds[id]-2][1]:
                        fingers.append(1)
                    else:
                        fingers.append(0)


        return fingers

    def right_hand_fingersUp(self, axis=False):
        fingers = []

        if axis == False:
            # Thumb
            if self.right_hand_lmList[self.tipIds[0]][1] > self.right_hand_lmList[self.tipIds[4]][1]:
                if self.right_hand_lmList[self.tipIds[0]][1] > self.right_hand_lmList[self.tipIds[0] - 2][1]:
                    fingers.append(1)
                else:
                    fingers.append(0)
            if self.right_hand_lmList[self.tipIds[0]][1] < self.right_hand_lmList[self.tipIds[4]][1]:
                if self.right_hand_lmList[self.tipIds[0]][1] < self.right_hand_lmList[self.tipIds[0] - 2][1]:
                    fingers.append(1)
                else:
                    fingers.append(0)

            # Fingers except thumb
            for id in range(1, 5):
                if self.right_hand_lmList[self.tipIds[id]][2] < self.right_hand_lmList[self.tipIds[id]-2][2]:
                    fingers.append(1)
                else:
                    fingers.append(0)

        # axis = True (to detect the LIKE gesture)
        else:
            # Thumb
            if self.right_hand_lmList[self.tipIds[0]][2] < self.right_hand_lmList[self.tipIds[0] - 2][2]:
                fingers.append(1)
            else:
                fingers.append(0)

            # Fingers except thumb
            if self.right_hand_lmList[self.tipIds[0]][1] < self.right_hand_lmList[self.tipIds[4]][1]:
                for id in range(1, 5):
                    if self.right_hand_lmList[self.tipIds[id]][1] > self.right_hand_lmList[self.tipIds[id]-2][1]:
                        fingers.append(1)
                    else:
                        fingers.append(0)
            else:
                for id in range(1, 5):
                    if self.right_hand_lmList[self.tipIds[id]][1] < self.right_hand_lmList[self.tipIds[id]-2][1]:
                        fingers.append(1)
                    else:
                        fingers.append(0)

        return fingers

    def findCenter(self, p1, p2):
        x1, y1 = self.pose_lmList[p1][1:3]
        x2, y2 = self.pose_lmList[p2][1:3]
        cx, cy = (x1 + x2) // 2, (y1 + y2) // 2

        return cx, cy

    def findDistance(self, p1, p2, img, draw=True, r=15, t=3):
        x1, y1 = self.face_lmList[p1][1:3]
        x2, y2 = p2[0], p2[1]

        if draw:
            cv2.line(img, (x1, y1), (x2, y2), (255, 0, 255), t)
            cv2.circle(img, (x1, y1), r, (255, 0, 255), cv2.FILLED)
            cv2.circle(img, (x2, y2), r, (255, 0, 255), cv2.FILLED)
        length = math.hypot(x2-x1, y2-y1)

        return length, img

    def findDepth(self, p1, p2):
        depth = abs((self.pose_lmList[p1][3] + self.pose_lmList[p2][3]) / 2)
        return depth

    def findEyeBlink(self, p1, p2, img, draw=True, r=15, t=3):
        x1, y1 = self.face_lmList[p1][1:3]
        x2, y2 = self.face_lmList[p2][1:3]

        if draw:
            cv2.line(img, (x1, y1), (x2, y2), (255, 0, 255), t)
            cv2.circle(img, (x1, y1), r, (255, 0, 255), cv2.FILLED)
            cv2.circle(img, (x2, y2), r, (255, 0, 255), cv2.FILLED)
        length = math.hypot(x2-x1, y2-y1)

        return length, img

    def findEyeDepth(self, p1, p2):
        depth = abs((self.face_lmList[p1][3] + self.face_lmList[p2][3]) / 2)
        return depth

    def drawLine(self, p1, p2, img, t=3):
        x1, y1 = self.face_lmList[p1][1:3]
        x2, y2 = self.face_lmList[p2][1:3]

        cv2.line(img, (x1, y1), (x2, y2), (255, 255, 255), t)

    def findLength_lh_rh(self, p1, p2):
        x1, y1 = self.left_hand_lmList[p2][1:3]
        x2, y2 = self.right_hand_lmList[p1][1:3]

        length = math.hypot(abs(x2-x1), abs(y2-y1))
        return length

    def findLength_lh_lh(self, p1, p2):
        x1, y1 = self.left_hand_lmList[p2][1:3]
        x2, y2 = self.left_hand_lmList[p1][1:3]

        length = math.hypot(abs(x2-x1), abs(y2-y1))
        return length

    def findLength_rh_rh(self, p1, p2):
        x1, y1 = self.right_hand_lmList[p2][1:3]
        x2, y2 = self.right_hand_lmList[p1][1:3]

        length = math.hypot(abs(x2-x1), abs(y2-y1))
        return length

    def findLength_pose(self, p1, p2):
        x1, y1 = self.pose_lmList[p2][1:3]
        x2, y2 = self.pose_lmList[p1][1:3]

        length = math.hypot(abs(x2-x1), abs(y2-y1))
        return length

    def findAngle(self, img, p1, p2, p3, draw=True):
        # Get the landmark coordinates
        # , x1, y1 = self.lmList[p1]
        x1, y1 = self.pose_lmList[p1][1:3]
        x2, y2 = self.pose_lmList[p2][1:3]
        x3, y3 = self.pose_lmList[p3][1:3]

        # Compute the angle
        radian = math.atan2(y3-y2, x3-x2) - math.atan2(y1-y2, x1-x2)
        angle = math.degrees(radian)

        if angle < 0:
            angle += 360

        # print(angle)
        # Draw points and lines
        if draw:
            cv2.line(img, (x1, y1), (x2, y2), (255, 255, 255), 3)
            cv2.line(img, (x2, y2), (x3, y3), (255, 255, 255), 3)
            cv2.circle(img, (x1, y1), 10, (0, 0, 255), cv2.FILLED)
            cv2.circle(img, (x1, y1), 15, (0, 0, 255), 2)
            cv2.circle(img, (x2, y2), 10, (0, 0, 255), cv2.FILLED)
            cv2.circle(img, (x2, y2), 15, (0, 0, 255), 2)
            cv2.circle(img, (x3, y3), 10, (0, 0, 255), cv2.FILLED)
            cv2.circle(img, (x3, y3), 15, (0, 0, 255), 2)
            cv2.putText(img, str(int(angle)), (x2-50, y2+50), cv2.FONT_HERSHEY_PLAIN, 2, (0, 0, 255), 2)

        return angle

    def findHandAngle(self, img, p1, p2, p3, draw=True):
        # Get the landmark coordinates
        # , x1, y1 = self.lmList[p1]
        x1, y1 = self.right_hand_lmList[p1][1:3]
        x2, y2 = self.right_hand_lmList[p2][1:3]
        x3, y3 = self.right_hand_lmList[p3][1:3]

        # Compute the angle
        radian = math.atan2(y3-y2, x3-x2) - math.atan2(y1-y2, x1-x2)
        angle = math.degrees(radian)

        if angle < 0:
            angle += 360

        # print(angle)
        # Draw points and lines
        if draw:
            cv2.line(img, (x1, y1), (x2, y2), (255, 255, 255), 3)
            cv2.line(img, (x2, y2), (x3, y3), (255, 255, 255), 3)
            cv2.circle(img, (x1, y1), 10, (0, 0, 255), cv2.FILLED)
            cv2.circle(img, (x1, y1), 15, (0, 0, 255), 2)
            cv2.circle(img, (x2, y2), 10, (0, 0, 255), cv2.FILLED)
            cv2.circle(img, (x2, y2), 15, (0, 0, 255), 2)
            cv2.circle(img, (x3, y3), 10, (0, 0, 255), cv2.FILLED)
            cv2.circle(img, (x3, y3), 15, (0, 0, 255), 2)
            cv2.putText(img, str(int(angle)), (x2-50, y2+50), cv2.FONT_HERSHEY_PLAIN, 2, (0, 0, 255), 2)

        return angle
--------------------------------------------------------------------------------
/Sign_Language_Translation/modules/utils.py:
--------------------------------------------------------------------------------
import os
import cv2
import numpy as np

def createDirectory(directory):
    try:
        if not os.path.exists(directory):
            os.makedirs(directory)
    except OSError:
        print("Error: Failed to create the directory.")

# vector normalization
def Vector_Normalization(joint):
    # Compute angles between joints
    v1 = joint[[0,1,2,3,0,5,6,7,0,9,10,11,0,13,14,15,0,17,18,19], :2]  # Parent joint
    v2 = joint[[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20], :2]  # Child joint
    v = v2 - v1
    # Normalize v
    v = v / np.linalg.norm(v, axis=1)[:, np.newaxis]

    # Get angle using arccos of dot product
    angle = np.arccos(np.einsum('nt,nt->n',
                                v[[0,1,2,4,5,6,8,9,10,12,13,14,16,17,18], :],
                                v[[1,2,3,5,6,7,9,10,11,13,14,15,17,18,19], :]))

    angle = np.degrees(angle)  # Convert radians to degrees

    angle_label = np.array([angle], dtype=np.float32)

    return v, angle_label
--------------------------------------------------------------------------------
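A quick sanity check of Vector_Normalization's contract may help: the callers in this repo allocate a (42, 2) buffer (room for two hands' landmarks) but fill only the first 21 rows for the right hand. A minimal sketch with stand-in landmarks:

```python
import numpy as np
from modules.utils import Vector_Normalization

joint = np.zeros((42, 2))
joint[:21] = np.random.rand(21, 2)  # 21 hand landmarks, (x, y) in [0, 1]

v, angle_label = Vector_Normalization(joint)
print(v.shape)            # (20, 2): unit "bone" vectors between parent/child joints
print(angle_label.shape)  # (1, 15): angles in degrees between adjacent bone vectors
print(np.concatenate([v.flatten(), angle_label.flatten()]).shape)  # (55,)
```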
/Sign_Language_Translation/readme.md:
--------------------------------------------------------------------------------
### User Guide [Example Workflow]

1. making_video.py
   - Records videos of the hand gestures you want.
   - Each recording runs for 30 seconds; press "ESC" to stop early.
   - You can record multiple takes.
   - [Note] The landmarks rendered during recording are not saved to the video.

2. show_video.py (optional)
   - Reviews the recorded data.
   - Given a video_path, plays every video file under that path.

3. create_dataset_from_video.py
   - Converts the recorded videos into sequence data of hand joints and angles, saved as .npy files.

4. train_hand_gesture.ipynb
   - Loads the .npy files and builds the model.
   - Produces two models, a Keras model and a TFLite model (only the TFLite model is used).

5. video_test_model : tests with videos from the specified videoFolderPath.
   - tflite.py : tests the TFLite model.

6. webcam_test_model : tests with a webcam.
   - tflite.py : tests the TFLite model.

c.f.) unicode.py was meant for composing consonants and vowels (jamos) but ended up unused.



### Choosing a data normalization method
- Apply data normalization as needed (on or off).
- Pick exactly one of the two lines.

```python
# Save position-dependent data
d = np.concatenate([joint.flatten(), angle_label])

# Remove position dependency with normalized vectors
d = np.concatenate([vector.flatten(), angle_label.flatten()])

```
(Example) Code that applies vector normalization
```python
# Save position-dependent data
# d = np.concatenate([joint.flatten(), angle_label])

# Remove position dependency with normalized vectors
d = np.concatenate([vector.flatten(), angle_label.flatten()])

```
--------------------------------------------------------------------------------
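To make the two options concrete: with the normalized-vector option each frame contributes 40 vector components and 15 angles, and create_dataset_from_video.py appends the class index, so each saved seq_*.npy holds windows of 56-wide rows, while the TFLite model consumes label-free 55-wide rows. A sketch of how the notebook presumably splits a saved array (an assumption; the notebook itself is not shown here):

```python
import numpy as np

# Assumed layout, matching how create_dataset_from_video.py builds each row.
data = np.load('dataset/seq_ㄱ_1669720403.npy')  # shape (N, 10, 56)

x = data[:, :, :-1]             # (N, 10, 55) model inputs
y = data[:, 0, -1].astype(int)  # (N,) class indices (label repeats on every frame)
```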
/Sign_Language_Translation/show_video.py:
--------------------------------------------------------------------------------
import cv2
import sys, os
import numpy as np
from PIL import ImageFont, ImageDraw, Image

fontpath = "fonts/HMKMMAG.TTF"
font = ImageFont.truetype(fontpath, 40)

videoFolderPath = "dataset/output_video"
videoTestList = os.listdir(videoFolderPath)

testTargetList = []

for videoPath in videoTestList:
    actionVideoPath = f'{videoFolderPath}/{videoPath}'
    actionVideoList = os.listdir(actionVideoPath)
    for actionVideo in actionVideoList:
        fullVideoPath = f'{actionVideoPath}/{actionVideo}'
        testTargetList.append(fullVideoPath)

print("---------- Start Video List ----------")
testTargetList = sorted(testTargetList, key=lambda x: x[x.find("/", 9)+1], reverse=True)
print(testTargetList)
print("---------- End Video List ----------\n")

for target in testTargetList:
    print("Now Streaming :", target)
    cap = cv2.VideoCapture(target)

    # Check that the video opened
    if not cap.isOpened():
        print("Camera open failed!")
        sys.exit()

    # Read the video properties
    # round() to get integer values
    w = round(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
    h = round(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
    fps = cap.get(cv2.CAP_PROP_FPS)  # may or may not be valid depending on the camera

    if fps != 0:
        delay = round(1000/fps)
    else:
        delay = round(1000/30)

    # Read and display frames
    while True:
        ret, img = cap.read()

        if not ret:
            break

        # draw box
        cv2.rectangle(img, (0, 0), (w, 70), (245, 117, 16), -1)

        # draw text target name
        # cv2.putText(img, target, (15,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 1, cv2.LINE_AA)

        # Render the Korean font
        img_pil = Image.fromarray(img)
        draw = ImageDraw.Draw(img_pil)
        draw.text((15, 20), target[23:], font=font, fill=(0, 0, 0))
        img = np.array(img_pil)


        cv2.imshow('img', img)

        # Press ESC to quit early
        if cv2.waitKey(delay) == 27:
            break


    cap.release()
    cv2.destroyAllWindows()
print("\n---------- Finish Video Streaming ----------")
--------------------------------------------------------------------------------
/Sign_Language_Translation/unicode.py:
--------------------------------------------------------------------------------
__all__ = ["split_syllable_char", "split_syllables",
           "join_jamos", "join_jamos_char",
           "CHAR_INITIALS", "CHAR_MEDIALS", "CHAR_FINALS"]

# For combining consonants and vowels (jamos)
import itertools

INITIAL = 0x001
MEDIAL = 0x010
FINAL = 0x100
CHAR_LISTS = {
    INITIAL: list(map(chr, [
        0x3131, 0x3132, 0x3134, 0x3137, 0x3138, 0x3139,
        0x3141, 0x3142, 0x3143, 0x3145, 0x3146, 0x3147,
        0x3148, 0x3149, 0x314a, 0x314b, 0x314c, 0x314d,
        0x314e
    ])),
    MEDIAL: list(map(chr, [
        0x314f, 0x3150, 0x3151, 0x3152, 0x3153, 0x3154,
        0x3155, 0x3156, 0x3157, 0x3158, 0x3159, 0x315a,
        0x315b, 0x315c, 0x315d, 0x315e, 0x315f, 0x3160,
        0x3161, 0x3162, 0x3163
    ])),
    FINAL: list(map(chr, [
        0x3131, 0x3132, 0x3133, 0x3134, 0x3135, 0x3136,
        0x3137, 0x3139, 0x313a, 0x313b, 0x313c, 0x313d,
        0x313e, 0x313f, 0x3140, 0x3141, 0x3142, 0x3144,
        0x3145, 0x3146, 0x3147, 0x3148, 0x314a, 0x314b,
        0x314c, 0x314d, 0x314e
    ]))
}
CHAR_INITIALS = CHAR_LISTS[INITIAL]
CHAR_MEDIALS = CHAR_LISTS[MEDIAL]
CHAR_FINALS = CHAR_LISTS[FINAL]
CHAR_SETS = {k: set(v) for k, v in CHAR_LISTS.items()}
CHARSET = set(itertools.chain(*CHAR_SETS.values()))
CHAR_INDICES = {k: {c: i for i, c in enumerate(v)}
                for k, v in CHAR_LISTS.items()}


def is_hangul_syllable(c):
    return 0xac00 <= ord(c) <= 0xd7a3  # Hangul Syllables


def is_hangul_jamo(c):
    return 0x1100 <= ord(c) <= 0x11ff  # Hangul Jamo


def is_hangul_compat_jamo(c):
    return 0x3130 <= ord(c) <= 0x318f  # Hangul Compatibility Jamo


def is_hangul_jamo_exta(c):
    return 0xa960 <= ord(c) <= 0xa97f  # Hangul Jamo Extended-A


def is_hangul_jamo_extb(c):
    return 0xd7b0 <= ord(c) <= 0xd7ff  # Hangul Jamo Extended-B


def is_hangul(c):
    return (is_hangul_syllable(c) or
            is_hangul_jamo(c) or
            is_hangul_compat_jamo(c) or
            is_hangul_jamo_exta(c) or
            is_hangul_jamo_extb(c))


def is_supported_hangul(c):
    return is_hangul_syllable(c) or is_hangul_compat_jamo(c)


def check_hangul(c, jamo_only=False):
    if not ((jamo_only or is_hangul_compat_jamo(c)) or is_supported_hangul(c)):
        raise ValueError(f"'{c}' is not a supported hangul character. "
                         f"'Hangul Syllables' (0xac00 ~ 0xd7a3) and "
                         f"'Hangul Compatibility Jamos' (0x3130 ~ 0x318f) are "
                         f"supported at the moment.")


def get_jamo_type(c):
    check_hangul(c)
    assert is_hangul_compat_jamo(c), f"not a jamo: {ord(c):x}"
    return sum(t for t, s in CHAR_SETS.items() if c in s)


def split_syllable_char(c):
    """
    Splits a given Korean syllable into its components. Each component is
    represented by Unicode in the 'Hangul Compatibility Jamo' range.

    Arguments:
        c: A Korean character.

    Returns:
        A triple (initial, medial, final) of Hangul Compatibility Jamos.
        If no jamo corresponds to a position, `None` is returned there.

    Example:
        >>> split_syllable_char("안")
        ("ㅇ", "ㅏ", "ㄴ")
        >>> split_syllable_char("고")
        ("ㄱ", "ㅗ", None)
        >>> split_syllable_char("ㅗ")
        (None, "ㅗ", None)
        >>> split_syllable_char("ㅇ")
        ("ㅇ", None, None)
    """
    check_hangul(c)
    if len(c) != 1:
        raise ValueError("Input string must have exactly one character.")

    init, med, final = None, None, None
    if is_hangul_syllable(c):
        offset = ord(c) - 0xac00
        x = (offset - offset % 28) // 28
        init, med, final = x // 21, x % 21, offset % 28
        if not final:
            final = None
        else:
            final -= 1
    else:
        pos = get_jamo_type(c)
        if pos & INITIAL == INITIAL:
            pos = INITIAL
        elif pos & MEDIAL == MEDIAL:
            pos = MEDIAL
        elif pos & FINAL == FINAL:
            pos = FINAL
        idx = CHAR_INDICES[pos][c]
        if pos == INITIAL:
            init = idx
        elif pos == MEDIAL:
            med = idx
        else:
            final = idx
    return tuple(CHAR_LISTS[pos][idx] if idx is not None else None
                 for pos, idx in
                 zip([INITIAL, MEDIAL, FINAL], [init, med, final]))


def split_syllables(s, ignore_err=True, pad=None):
    """
    Performs syllable-split on a string.

    Arguments:
        s (str): A string (possibly mixed with non-Hangul characters).
        ignore_err (bool): If set False, it ensures that all characters in
            the string are Hangul-splittable and throws a ValueError otherwise.
            (default: True)
        pad (str): Pad empty jamo positions (initial, medial, or final) with
            `pad` character. This is useful for cases where fixed-length
            strings are needed. (default: None)

    Returns:
        Hangul-split string

    Example:
        >>> split_syllables("안녕하세요")
        "ㅇㅏㄴㄴㅕㅇㅎㅏㅅㅔㅇㅛ"
        >>> split_syllables("안녕하세요~~", ignore_err=False)
        ValueError: encountered an unsupported character: ~ (0x7e)
        >>> split_syllables("안녕하세요ㅛ", pad="x")
        'ㅇㅏㄴㄴㅕㅇㅎㅏxㅅㅔxㅇㅛxxㅛx'
    """

    def try_split(c):
        try:
            return split_syllable_char(c)
        except ValueError:
            if ignore_err:
                return (c,)
            raise ValueError(f"encountered an unsupported character: "
                             f"{c} (0x{ord(c):x})")

    s = map(try_split, s)
    if pad is not None:
        tuples = map(lambda x: tuple(pad if y is None else y for y in x), s)
    else:
        tuples = map(lambda x: filter(None, x), s)
    return "".join(itertools.chain(*tuples))


def join_jamos_char(init, med, final=None):
    """
    Combines jamos into a single syllable.

    Arguments:
        init (str): Initial jamo.
        med (str): Medial jamo.
        final (str): Final jamo. If not supplied, the syllable is made
            without a final. (default: None)

    Returns:
        A Korean syllable.
    """
    chars = (init, med, final)
    for c in filter(None, chars):
        check_hangul(c, jamo_only=True)

    idx = tuple(CHAR_INDICES[pos][c] if c is not None else c
                for pos, c in zip((INITIAL, MEDIAL, FINAL), chars))
    init_idx, med_idx, final_idx = idx
    # final index must be shifted once as
    # final index 0 points to syllables without a final
    final_idx = 0 if final_idx is None else final_idx + 1
    return chr(0xac00 + 28 * 21 * init_idx + 28 * med_idx + final_idx)


def join_jamos(s, ignore_err=True):
    """
    Combines a sequence of jamos to produce a sequence of syllables.

    Arguments:
        s (str): A string (possibly mixed with non-jamo characters).
        ignore_err (bool): If set False, it will ensure that all characters
            will be consumed for the making of syllables. It will throw a
            ValueError when it fails to do so. (default: True)

    Returns:
        A string

    Example:
        >>> join_jamos("ㅇㅏㄴㄴㅕㅇㅎㅏㅅㅔㅇㅛ")
        "안녕하세요"
        >>> join_jamos("ㅇㅏㄴㄴㄴㅕㅇㅎㅏㅅㅔㅇㅛ")
        "안ㄴ녕하세요"
    """
    last_t = 0
    queue = []
    new_string = ""

    def flush(n=0):
        new_queue = []
        while len(queue) > n:
            new_queue.append(queue.pop())
        if len(new_queue) == 1:
            if not ignore_err:
                raise ValueError(f"invalid jamo character: {new_queue[0]}")
            result = new_queue[0]
        elif len(new_queue) >= 2:
            try:
                result = join_jamos_char(*new_queue)
            except (ValueError, KeyError):
                # Invalid jamo combination
                if not ignore_err:
                    raise ValueError(f"invalid jamo characters: {new_queue}")
                result = "".join(new_queue)
        else:
            result = None
        return result

    for c in s:
        if c not in CHARSET:
            if queue:
                new_c = flush() + c
            else:
                new_c = c
            last_t = 0
        else:
            t = get_jamo_type(c)
            new_c = None
            if t & FINAL == FINAL:
                if not (last_t == MEDIAL):
                    new_c = flush()
            elif t == INITIAL:
                new_c = flush()
            elif t == MEDIAL:
                if last_t & INITIAL == INITIAL:
                    new_c = flush(1)
                else:
                    new_c = flush()
            last_t = t
            queue.insert(0, c)
        if new_c:
            new_string += new_c
    if queue:
        new_string += flush()
    return new_string
--------------------------------------------------------------------------------
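Although unicode.py ended up unused in the pipeline (see the readme), its intended role is easy to demonstrate; the calls below simply mirror the docstring examples above:

```python
from unicode import split_syllables, join_jamos

print(split_syllables("안녕하세요"))           # ㅇㅏㄴㄴㅕㅇㅎㅏㅅㅔㅇㅛ
print(join_jamos("ㅇㅏㄴㄴㅕㅇㅎㅏㅅㅔㅇㅛ"))   # 안녕하세요
```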
/Sign_Language_Translation/video_test_model_tflite.py:
--------------------------------------------------------------------------------
import sys
import cv2
import mediapipe as mp
import numpy as np
import tensorflow as tf
import modules.holistic_module as hm
from tensorflow.keras.models import load_model
import math
import os
from PIL import ImageFont, ImageDraw, Image
from modules.utils import Vector_Normalization
fontpath = "fonts/HMKMMAG.TTF"
font = ImageFont.truetype(fontpath, 40)


actions = ['ㄱ', 'ㄴ', 'ㄷ', 'ㄹ', 'ㅁ', 'ㅂ', 'ㅅ', 'ㅇ', 'ㅈ', 'ㅊ', 'ㅋ', 'ㅌ', 'ㅍ', 'ㅎ',
           'ㅏ', 'ㅑ', 'ㅓ', 'ㅕ', 'ㅗ', 'ㅛ', 'ㅜ', 'ㅠ', 'ㅡ', 'ㅣ',
           'ㅐ', 'ㅒ', 'ㅔ', 'ㅖ', 'ㅢ', 'ㅚ', 'ㅟ']
seq_length = 10

# MediaPipe holistic model
detector = hm.HolisticDetector(min_detection_confidence=0.3)

# Load TFLite model and allocate tensors.
interpreter = tf.lite.Interpreter(model_path="models/multi_hand_gesture_classifier.tflite")
interpreter.allocate_tensors()

# Get input and output tensors.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

# test video path
videoFolderPath = "dataset/example1"
videoTestList = os.listdir(videoFolderPath)

testTargetList = []

for videoPath in videoTestList:
    actionVideoPath = f'{videoFolderPath}/{videoPath}'
    actionVideoList = os.listdir(actionVideoPath)
    for actionVideo in actionVideoList:
        fullVideoPath = f'{actionVideoPath}/{actionVideo}'
        testTargetList.append(fullVideoPath)

testTargetList = sorted(testTargetList, key=lambda x: x[x.find("/", 9)+1], reverse=True)

for target in testTargetList:

    cap = cv2.VideoCapture(target)

    seq = []
    action_seq = []
    last_action = None

    while cap.isOpened():
        ret, img = cap.read()
        if not ret:
            break

        img = detector.findHolistic(img, draw=True)
        _, right_hand_lmList = detector.findRighthandLandmark(img)

        if right_hand_lmList is not None:
            joint = np.zeros((42, 2))

            # Right-hand landmark list
            for j, lm in enumerate(right_hand_lmList.landmark):
                joint[j] = [lm.x, lm.y]


            # Vector normalization
            vector, angle_label = Vector_Normalization(joint)

            # Save position-dependent data
            # d = np.concatenate([joint.flatten(), angle_label])

            # Remove position dependency with normalized vectors
            d = np.concatenate([vector.flatten(), angle_label.flatten()])

            seq.append(d)

            if len(seq) < seq_length:
                continue

            # Test model on random input data.
            # input_shape = input_details[0]['shape']
            # input_data = np.array(np.random.random_sample(input_shape), dtype=np.float32)

            # Window the sequence into a NumPy array
            input_data = np.expand_dims(np.array(seq[-seq_length:], dtype=np.float32), axis=0)
            input_data = np.array(input_data, dtype=np.float32)

            # Predict with the TFLite model
            interpreter.set_tensor(input_details[0]['index'], input_data)
            interpreter.invoke()

            y_pred = interpreter.get_tensor(output_details[0]['index'])
            i_pred = int(np.argmax(y_pred[0]))
            conf = y_pred[0][i_pred]

            if conf < 0.9:
                continue

            action = actions[i_pred]
            action_seq.append(action)

            if len(action_seq) < 3:
                continue

            this_action = '?'
            if action_seq[-1] == action_seq[-2] == action_seq[-3]:
                this_action = action

            if last_action != this_action:
                last_action = this_action

            # Render the Korean font
            img_pil = Image.fromarray(img)
            draw = ImageDraw.Draw(img_pil)

            # org=(int(right_hand_lmList.landmark[0].x * img.shape[1]), int(right_hand_lmList.landmark[0].y * img.shape[0] + 20))
            draw.text((10, 30), f'{action.upper()}', font=font, fill=(255, 255, 255))

            img = np.array(img_pil)


        cv2.imshow('img', img)
        if cv2.waitKey(1) & 0xFF == 27:
            break

--------------------------------------------------------------------------------
/Sign_Language_Translation/webcam_test_model_tflite.py:
--------------------------------------------------------------------------------
import sys
# sys.path.append('pingpong')
# from pingpong.pingpongthread import PingPongThread
import cv2
import mediapipe as mp
import numpy as np
import tensorflow as tf
import modules.holistic_module as hm
from tensorflow.keras.models import load_model
import math
from modules.utils import Vector_Normalization
from PIL import ImageFont, ImageDraw, Image
# from unicode import join_jamos

fontpath = "fonts/HMKMMAG.TTF"
font = ImageFont.truetype(fontpath, 40)

actions = ['ㄱ', 'ㄴ', 'ㄷ', 'ㄹ', 'ㅁ', 'ㅂ', 'ㅅ', 'ㅇ', 'ㅈ', 'ㅊ', 'ㅋ', 'ㅌ', 'ㅍ', 'ㅎ',
           'ㅏ', 'ㅑ', 'ㅓ', 'ㅕ', 'ㅗ', 'ㅛ', 'ㅜ', 'ㅠ', 'ㅡ', 'ㅣ',
           'ㅐ', 'ㅒ', 'ㅔ', 'ㅖ', 'ㅢ', 'ㅚ', 'ㅟ']
seq_length = 10

# MediaPipe holistic model
detector = hm.HolisticDetector(min_detection_confidence=0.3)

# Load TFLite model and allocate tensors.
interpreter = tf.lite.Interpreter(model_path="models/multi_hand_gesture_classifier.tflite")
interpreter.allocate_tensors()

# Get input and output tensors.
input_details = interpreter.get_input_details()
output_details = interpreter.get_output_details()

cap = cv2.VideoCapture(0)

seq = []
action_seq = []
last_action = None

# zamo_list = []

while cap.isOpened():
    ret, img = cap.read()
    if not ret:
        break

    img = detector.findHolistic(img, draw=True)
    # _, left_hand_lmList = detector.findLefthandLandmark(img)
    _, right_hand_lmList = detector.findRighthandLandmark(img)

    # if left_hand_lmList is not None and right_hand_lmList is not None:
    if right_hand_lmList is not None:

        joint = np.zeros((42, 2))
        # Left-hand landmark list
        # for j, lm in enumerate(left_hand_lmList.landmark):
        #     joint[j] = [lm.x, lm.y]

        # Right-hand landmark list
        for j, lm in enumerate(right_hand_lmList.landmark):
            # joint[j+21] = [lm.x, lm.y]
            joint[j] = [lm.x, lm.y]

        # Coordinate normalization
        # full_scale = Coordinate_Normalization(joint)

        # Vector normalization
        vector, angle_label = Vector_Normalization(joint)

        # Save position-dependent data
        # d = np.concatenate([joint.flatten(), angle_label])

        # Remove position dependency with normalized vectors
        d = np.concatenate([vector.flatten(), angle_label.flatten()])

        # Remove position dependency with normalized coordinates
        # d = np.concatenate([full_scale, angle_label.flatten()])


        seq.append(d)

        if len(seq) < seq_length:
            continue

        # Test model on random input data.
        # input_shape = input_details[0]['shape']
        # input_data = np.array(np.random.random_sample(input_shape), dtype=np.float32)

        # Window the sequence into a NumPy array
        input_data = np.expand_dims(np.array(seq[-seq_length:], dtype=np.float32), axis=0)
        input_data = np.array(input_data, dtype=np.float32)

        # Predict with the TFLite model
        interpreter.set_tensor(input_details[0]['index'], input_data)
        interpreter.invoke()

        y_pred = interpreter.get_tensor(output_details[0]['index'])
        i_pred = int(np.argmax(y_pred[0]))
        conf = y_pred[0][i_pred]

        if conf < 0.9:
            continue

        action = actions[i_pred]
        action_seq.append(action)

        if len(action_seq) < 3:
            continue

        this_action = '?'
        if action_seq[-1] == action_seq[-2] == action_seq[-3]:
            this_action = action

        if last_action != this_action:
            last_action = this_action
        '''
        # Track the recorded Hangul
        if zamo_list[-1] != this_action:  # if this letter differs from the last recorded one
            zamo_list.append(this_action)

        zamo_str = ''.join(zamo_list)        # join the letters in the list
        unitl_action = join_jamos(zamo_str)  # compose the joined jamos into Hangul
        '''

        # Render the Korean font
        img_pil = Image.fromarray(img)
        draw = ImageDraw.Draw(img_pil)
        '''
        draw.text((int(right_hand_lmList.landmark[0].x * img.shape[1]), int(right_hand_lmList.landmark[0].y * img.shape[0] + 20)),
                  f'{this_action.upper()}',
                  font=font,
                  fill=(255, 255, 255))
        '''
        draw.text((10, 30), f'{action.upper()}', font=font, fill=(255, 255, 255))

        img = np.array(img_pil)




    # cv2.putText(img, f'{this_action.upper()}', org=(int(right_hand_lmList.landmark[0].x * img.shape[1]), int(right_hand_lmList.landmark[0].y * img.shape[0] + 20)), fontFace=cv2.FONT_HERSHEY_SIMPLEX, fontScale=1, color=(255, 255, 255), thickness=2)


    cv2.imshow('img', img)
    if cv2.waitKey(1) & 0xFF == 27:
        break
--------------------------------------------------------------------------------
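The commented-out zamo_list block in webcam_test_model_tflite.py hints at composing stabilized predictions into full syllables with unicode.py. A minimal standalone sketch of that idea, fed a hypothetical prediction stream for illustration:

```python
from unicode import join_jamos

recognized = []  # stabilized jamo predictions collected over time

for this_action in ['ㄱ', 'ㄱ', 'ㅗ', 'ㅗ']:  # hypothetical stream of this_action values
    if this_action != '?' and (not recognized or recognized[-1] != this_action):
        recognized.append(this_action)

print(join_jamos(''.join(recognized)))  # 고
```

As in the original comment block, consecutive duplicates are dropped, so a letter that genuinely repeats (for example the ㄴㄴ in 안녕) would need extra timing logic.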
https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄴ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㄴ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄴ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㄷ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄷ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㄷ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄷ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㄷ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄷ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㄹ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄹ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㄹ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄹ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㄹ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㄹ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅁ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅁ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅁ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅁ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅁ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅁ_1669724266.npy 
-------------------------------------------------------------------------------- /dataset/seq_ㅂ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅂ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅂ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅂ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅂ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅂ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅅ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅅ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅅ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅅ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅅ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅅ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅇ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅇ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅇ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅇ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅇ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅇ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅈ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅈ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅈ_1669723415.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅈ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅈ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅈ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅊ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅊ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅊ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅊ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅊ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅊ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅋ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅋ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅋ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅋ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅋ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅋ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅌ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅌ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅌ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅌ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅌ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅌ_1669724266.npy 
-------------------------------------------------------------------------------- /dataset/seq_ㅍ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅍ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅍ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅍ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅍ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅍ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅎ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅎ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅎ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅎ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅎ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅎ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅏ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅏ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅏ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅏ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅏ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅏ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅐ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅐ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅐ_1669723415.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅐ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅐ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅐ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅑ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅑ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅑ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅑ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅑ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅑ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅒ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅒ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅒ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅒ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅒ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅒ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅓ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅓ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅓ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅓ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅓ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅓ_1669724266.npy 
-------------------------------------------------------------------------------- /dataset/seq_ㅔ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅔ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅔ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅔ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅔ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅔ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅕ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅕ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅕ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅕ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅕ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅕ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅖ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅖ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅖ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅖ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅖ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅖ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅗ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅗ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅗ_1669723415.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅗ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅗ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅗ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅚ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅚ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅚ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅚ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅚ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅚ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅛ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅛ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅛ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅛ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅛ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅛ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅜ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅜ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅜ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅜ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅜ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅜ_1669724266.npy 
-------------------------------------------------------------------------------- /dataset/seq_ㅟ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅟ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅟ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅟ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅟ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅟ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅠ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅠ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅠ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅠ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅠ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅠ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅡ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅡ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅡ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅡ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅡ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅡ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅢ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅢ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅢ_1669723415.npy: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅢ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅢ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅢ_1669724266.npy -------------------------------------------------------------------------------- /dataset/seq_ㅣ_1669720403.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅣ_1669720403.npy -------------------------------------------------------------------------------- /dataset/seq_ㅣ_1669723415.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅣ_1669723415.npy -------------------------------------------------------------------------------- /dataset/seq_ㅣ_1669724266.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/dataset/seq_ㅣ_1669724266.npy -------------------------------------------------------------------------------- /models/multi_hand_gesture_classifier.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/models/multi_hand_gesture_classifier.h5 -------------------------------------------------------------------------------- /models/multi_hand_gesture_classifier.tflite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/models/multi_hand_gesture_classifier.tflite -------------------------------------------------------------------------------- /readme.md: --------------------------------------------------------------------------------

## ***Building a Real-Time Sign Language Recognition Module (with MediaPipe, LSTM)***
***
### Team Project

#### Team: 최준용 (Choi Jun-yong), 이태범 (Lee Tae-beom), 서동혁 (Seo Dong-hyuk) ***(Kookmin University)***

***

- A Korean fingerspelling recognition program that works on both live webcam footage and recorded video

- Applications
  - A study aid for people learning sign language
  - A real-time communication tool for conversing with sign language users

- Sign coverage
  - The 31 Hangul fingerspelling signs for the Korean consonants and vowels

![지문자_이미지](https://user-images.githubusercontent.com/90700892/209419165-fd373820-e70c-4a1b-b2a4-c82439db4c1c.jpg)

Source: https://www.urimal.org/1222

***

### ***Data Collection***

- Each of the three team members recorded training videos for all 31 consonant and vowel signs

![팀원3명데이터생성](https://user-images.githubusercontent.com/90700892/209419274-abda09a1-fd5b-4a8e-b018-2c04209db293.gif)

Generating data for 'ㅎ', 'ㅏ', and 'ㄱ', respectively...

***

### ***Data Preprocessing***

![hand_landmarks](https://user-images.githubusercontent.com/90700892/209419270-aad3fcde-48b7-40cf-b2f6-a38edeee1e89.png)

Source: https://google.github.io/mediapipe/solutions/hands.html

From the collected videos, we extract the vector and angle values between the hand keypoints shown above and use them as the model's input features (a sketch of this step follows below).

Because the project targets fingerspelling, only the keypoints of a single hand are used.
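As a rough illustration of this step — a minimal sketch rather than the repository's exact create_dataset_from_video.py code, so the helper name and index bookkeeping here are our own — the inter-joint angles can be derived from the 21 landmarks MediaPipe reports for one hand:

```python
import numpy as np

def angles_from_landmarks(joint: np.ndarray) -> np.ndarray:
    """Compute 15 inter-bone angles from one hand's MediaPipe landmarks.

    joint: (21, 3) array of x, y, z coordinates, indexed as in the
    hand-landmark diagram above. Returns angles in degrees.
    """
    # Bone vectors: child landmark minus parent landmark along each finger
    v1 = joint[[0, 1, 2, 3, 0, 5, 6, 7, 0, 9, 10, 11, 0, 13, 14, 15, 0, 17, 18, 19], :]
    v2 = joint[[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20], :]
    v = v2 - v1
    v = v / np.linalg.norm(v, axis=1)[:, np.newaxis]  # normalize to unit length
    # Angle between each pair of adjacent bones: arccos of their dot product
    dot = np.einsum(
        'nt,nt->n',
        v[[0, 1, 2, 4, 5, 6, 8, 9, 10, 12, 13, 14, 16, 17, 18], :],
        v[[1, 2, 3, 5, 6, 7, 9, 10, 11, 13, 14, 15, 17, 18, 19], :])
    return np.degrees(np.arccos(np.clip(dot, -1.0, 1.0)))
```

Per frame, these angles (together with the raw landmark coordinates) can then be concatenated into the feature vector that the rest of the pipeline consumes.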
***

### ***Pipeline***

- making_video.py
  - Records a video for the chosen consonant or vowel (using OpenCV).

- create_dataset_from_video.py
  - Uses the video data to convert the hand-keypoint vector and angle values into sequence data and saves it as npy files (see the windowing sketch at the end of this document).

- train_hand_gesture.ipynb
  - Loads the npy files and trains the model (see the model sketch at the end of this document).

- video_test_model_tflite.py
  - Tests the model on the saved videos; point videoFolderPath at the folder to evaluate.

- webcam_test_model_tflite.py
  - Tests the model in real time with a webcam (see the inference sketch at the end of this document).

***

### ***Demo***

#### Using webcam

![KakaoTalk_20221210_214325078](https://user-images.githubusercontent.com/90700892/209419238-00fee2f8-179d-4632-9e84-a1c18cf81b94.gif)

***

### ***Conclusion***

Although copyright issues kept us from including the clips here, running the model on third-party YouTube videos showed that it classifies the fingerspelled characters accurately.
As future work, we plan to extend the project so that the characters displayed in the upper left of the frame are combined into full text output.

--------------------------------------------------------------------------------
/version_requirements.txt:
--------------------------------------------------------------------------------
tensorflow==2.4.0
scikit-learn
numpy
opencv-python
mediapipe
--------------------------------------------------------------------------------
/실시간 수화 번역 인식 모듈 생성.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/JunYong-Choi/Sign_Language_Translation/d3374ddcc6b5532e683db2275e7ad2074dc9627d/실시간 수화 번역 인식 모듈 생성.pptx
--------------------------------------------------------------------------------
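For reference, the sequence-building step in create_dataset_from_video.py can be pictured as a sliding window over per-frame feature vectors. This is a minimal sketch under assumed values — the 30-frame window, the 99-dimensional feature size, and the helper name are illustrative, not read from the repository:

```python
import numpy as np

SEQ_LENGTH = 30  # assumed frames per training sequence

def to_sequences(frame_features: np.ndarray) -> np.ndarray:
    """Slide a fixed-length window over per-frame feature vectors.

    frame_features: (num_frames, feature_dim) array, one row per video
    frame (e.g. landmark coordinates plus the 15 angles).
    Returns an array of shape (num_windows, SEQ_LENGTH, feature_dim).
    """
    windows = [frame_features[i:i + SEQ_LENGTH]
               for i in range(len(frame_features) - SEQ_LENGTH + 1)]
    return np.stack(windows)

# Example: 120 frames of 99-dim features -> a (91, 30, 99) sequence array
features = np.random.rand(120, 99).astype(np.float32)  # stand-in data
np.save('seq_example.npy', to_sequences(features))
```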
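train_hand_gesture.ipynb then loads these npy files and fits the classifier. The readme fixes only the LSTM architecture and the 31 output classes; the layer sizes and shape constants below are assumptions, so treat this as a sketch rather than the notebook's actual code:

```python
import tensorflow as tf

NUM_CLASSES = 31   # one per Hangul consonant/vowel fingerspelling sign
SEQ_LENGTH = 30    # assumed; must match the dataset windowing above
FEATURE_DIM = 99   # assumed per-frame feature size

model = tf.keras.Sequential([
    tf.keras.layers.LSTM(64, input_shape=(SEQ_LENGTH, FEATURE_DIM)),
    tf.keras.layers.Dense(32, activation='relu'),
    tf.keras.layers.Dense(NUM_CLASSES, activation='softmax'),
])
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# model.fit(x_train, y_train, validation_split=0.1, epochs=200)

# Export to TFLite for the *_test_model_tflite.py scripts
converter = tf.lite.TFLiteConverter.from_keras_model(model)
with open('multi_hand_gesture_classifier.tflite', 'wb') as f:
    f.write(converter.convert())
```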
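Finally, webcam_test_model_tflite.py runs the converted model on a rolling window of features from the live camera. Here is a sketch of the TFLite half of that loop; the MediaPipe capture and feature extraction are elided, and the shapes follow the same assumptions as above:

```python
import numpy as np
import tensorflow as tf

# Load the converted model shipped in the repository
interpreter = tf.lite.Interpreter(
    model_path='models/multi_hand_gesture_classifier.tflite')
interpreter.allocate_tensors()
input_index = interpreter.get_input_details()[0]['index']
output_index = interpreter.get_output_details()[0]['index']

def predict(sequence: np.ndarray) -> int:
    """sequence: (SEQ_LENGTH, FEATURE_DIM) float32 window of hand features."""
    interpreter.set_tensor(input_index,
                           sequence[np.newaxis, ...].astype(np.float32))
    interpreter.invoke()
    probs = interpreter.get_tensor(output_index)[0]
    return int(np.argmax(probs))  # index into the 31 sign labels
```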