├── 00.pre-work ├── README.md └── images │ ├── images01.png │ ├── images02.png │ ├── images03.png │ ├── images04.png │ ├── images05.png │ ├── images06.png │ ├── images07.png │ └── images08.png ├── 01.Provision and CRUD ├── .DS_Store ├── README.md ├── application │ ├── .gitignore │ ├── connect.js │ ├── findOne.js │ ├── insertMany.js │ ├── insertOne.js │ ├── package-lock.json │ ├── package.json │ ├── removeUser.js │ ├── updateHobbies.js │ └── updateOne.js └── images │ ├── .DS_Store │ ├── image01.png │ ├── image02.png │ ├── image03.png │ ├── image04.png │ ├── image05.png │ ├── image06.png │ ├── image07.png │ ├── image08.png │ ├── image09.png │ ├── image10.png │ ├── image11.png │ ├── image12.png │ ├── image13.png │ ├── image14.png │ ├── image15.png │ ├── image16.png │ ├── image17.png │ ├── image20.png │ └── image21.png ├── 02.Document Model ├── README.md └── application │ ├── bucket.js │ ├── computed.js │ ├── package-lock.json │ ├── package.json │ ├── version1.js │ └── version2.js ├── 03.index and aggregation ├── .DS_Store ├── README.md ├── application │ ├── .gitignore │ ├── aggregation.js │ ├── package-lock.json │ └── package.json └── images │ ├── image01.png │ ├── image02.png │ ├── image03.png │ ├── image04.png │ ├── image05.png │ ├── image06.png │ ├── image07.png │ ├── image08.png │ ├── image09.png │ ├── image10.png │ ├── image11.png │ └── image12.png ├── 04.atlas-search ├── .DS_Store ├── .gitignore ├── README.md ├── app.js ├── bin │ └── www ├── data │ └── dump.tar.gz ├── images │ ├── .DS_Store │ ├── image01.png │ ├── image02.png │ ├── image03.png │ ├── image04.png │ ├── image05.png │ ├── image06.png │ ├── image07.png │ ├── image08.png │ ├── image09.png │ ├── image10.png │ ├── image11.png │ ├── image12.png │ ├── image13.png │ ├── image14.png │ ├── image15.png │ ├── image16.png │ ├── image20.png │ ├── image21.png │ ├── image22.png │ ├── image23.png │ ├── image24.png │ ├── image25.png │ ├── image26.png │ ├── image27.png │ ├── image28.png │ ├── image29.png │ ├── 
image30.png │ ├── image31.png │ ├── image32.png │ ├── image33.png │ └── image34.png ├── labs │ ├── lab1.js │ ├── lab2.js │ ├── lab3.js │ ├── lab4.js │ ├── lab5.js │ ├── lab6.js │ └── lab7.js ├── package.json ├── public │ ├── assets │ │ └── favicon.ico │ ├── index.html │ ├── javascripts │ │ ├── bootstrap.min.js │ │ └── scripts.js │ └── stylesheets │ │ ├── bootstrap.min.css │ │ ├── custom.css │ │ └── styles.css ├── routes │ ├── index.js │ ├── prebuilt_functions.js │ └── users.js ├── sample_config.js └── test │ ├── lab1.js │ ├── lab2.js │ ├── lab3.js │ ├── lab4.js │ ├── lab5.js │ ├── lab6.js │ └── lab7.js ├── 05.atlas-serverless ├── .gitignore ├── 01.dataapi-function │ ├── README.md │ ├── img-app │ │ ├── 01.app-template.png │ │ ├── 02.app-sel.png │ │ ├── 03.app-dash.png │ │ ├── 04.env.png │ │ ├── 05.env-rev-deploy.png │ │ └── 06.env-deployed.png │ ├── img-cu │ │ ├── 01.unahor-req-rule.png │ │ ├── 01.unauthor-req-fail.png │ │ ├── 02.auth-uid-no-apikey.png │ │ ├── 02.auth-uid-ok.png │ │ ├── 02.auth-uid-save.png │ │ ├── 02.auth-uid-select-user.png │ │ ├── 02.auth-uid-select.png │ │ ├── 03.filter-config.png │ │ ├── 03.filter-nodocs.png │ │ ├── 03.filter-query.png │ │ └── 03.filter-start.png │ ├── img-data │ │ ├── 01.start.png │ │ ├── 02.config.png │ │ ├── 03.deploy.png │ │ ├── 04.rule.png │ │ ├── 05.rule-deploy.png │ │ ├── 06.auth-apikey-create.png │ │ ├── 06.auth-apikey-deploy.png │ │ ├── 06.auth-apikey-save.png │ │ ├── 06.auth-apikey.png │ │ ├── 07.var-apikey.png │ │ └── 08.req.png │ ├── img-fn │ │ ├── 01.add-hep.png │ │ ├── 02.add-hep-url.png │ │ ├── 03.new-fn.png │ │ ├── 04.new-fn-param.png │ │ ├── 05.new-fn-res.png │ │ ├── 06.save.png │ │ ├── 07.query-ep.png │ │ ├── 08.pm-new-ws.png │ │ ├── 09.pm-import.png │ │ ├── 10.pm-folder.png │ │ ├── 11.pm-import-2.png │ │ └── 12.pm-env.png │ └── postman │ │ ├── auth-apiKey.postman_collection.json │ │ ├── customHTTPsEndpoint.postman_collection.json │ │ ├── unauth-apiKey.postman_collection.json │ │ └── 
workspace.postman_globals.json ├── 03.federation-onlinearchive │ ├── README.md │ ├── img-fed │ │ ├── 01.enter-atlasui.png │ │ ├── 02.enter-fed.png │ │ ├── 03.rename.png │ │ ├── 04.cluster.png │ │ ├── 05.add-cluster.png │ │ ├── 06.enable-s3.png │ │ ├── 07.add-s3.png │ │ ├── 08.enter-cluster.png │ │ ├── 09.query-cluster.png │ │ ├── 10.update-doc.png │ │ ├── 11.connect-fed-2.png │ │ ├── 11.connect-fed-3.png │ │ ├── 11.connect-fed.png │ │ ├── 12.query-fed.png │ │ └── 13.query-s3.png │ └── img-ola │ │ ├── 01.enter-ola.png │ │ ├── 02.rule-date.png │ │ ├── 02.rule-query.png │ │ ├── 03.keys.png │ │ ├── 04.begin.png │ │ ├── 05.done.png │ │ ├── 06.query-cluster.png │ │ ├── 07.conn-ola-2.png │ │ ├── 07.conn-ola.png │ │ └── 08.query-ola.png └── README.md └── README.md /00.pre-work/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | # MongoDB Atlas Hands-on Training 5 | 6 | ### Pre Work 7 | 8 | 9 | #### Atlas Account 10 | MongoDB Atlas 의 무료 계정을 생성 합니다. 11 | Atlas는 관리형 데이터 베이스로 간편하게 데이터 베이스를 생성 하고 인터넷을 통한 엑세스로 편리하게 사용 할 수 있습니다. 12 | 13 | 계정 생성을 위해 Atlas 사이트에 접속 합니다. 14 | 15 | https://www.mongodb.com/ko-kr/cloud/atlas/register 16 | 17 | 신용카드 입력 없이 계정을 생성 할 수 있습니다. 기존 계정을 가지고 있는 경우 2개의 Freetier 데이터베이스 클러스터까지 생성 가능 하며 Hands-on 과정도 Free-tier를 이용하게 됩니다. 18 | 19 | #### Database 생성 20 | Atlas에 로그인 후 테스트용 데이터 베이스를 생성 합니다. 21 | 로그인 후 Deployment 메뉴에 Database 를 클릭 합니다. 오른쪽 화면에 생성되어 진 데이터 베이스 정보를 볼 수 있으며 최초에는 데이터 베이스가 없음으로 Create를 클릭 하여 데이터베이스 클러스터를 생성 합니다. 22 | 23 | 24 | 클러스터 사양을 선택 할 수 있으며 무료로 사용 할 수 있는 Shared를 선택 하고 Cloud Provider로 AWS를 선택 하고 지역은 Seoul을 선택 합니다. 25 | 26 | 27 | Cluster Tier 는 M0 Sandbox 를 선택 합니다 (M2, M5는 추가 금액이 소요 됩니다.) 28 | 29 | 30 | Cluster Name을 입력 하고 Create Cluster를 클릭 하여 데이터 베이스를 생성합니다. (소요시간은 대략 10분이내가 소요 됩니다.) 31 | 32 | 33 | #### Database Account 생성 34 | Atlas 데이터베이스 클러스터를 접근하기 위한 계정 생성으로 Security 메뉴에 Database Access를 클릭 하여 계정을 생성 할 수 있습니다. 35 | Hands-on에서는 Id/password를 이용하는 방식의 데이터베이스 계정을 생성 합니다. 36 | 37 | 계정은 atlas-account로 하여 생성 합니다. Built-in Role 은 편의상 Read and Write to any database 를 선택합니다. 38 | 39 | 40 | #### Network Access 생성 41 | 데이터 베이스 접근 테스트를 위해서 접근 하려는 컴퓨터의 IP 주소를 방화벽에 허용 해 주어야 합니다. 42 | Security의 Network Access메뉴를 선택 합니다. 43 | 44 | Add IP Address를 클릭하고 Add IP Access List Entry 에서 Add current IP Address를 클릭하하고 Confirm을 선택 합니다. 45 | 방화벽 설정은 1분 가량의 시간이 소요 됩니다. 46 | 47 | 48 | #### 초기 데이터 로드 49 | 생성된 데이터 베이스 클러스터에 초기 샘플 데이터를 적재하여 Hands on을 진행 합니다. 50 | 51 | 52 | 53 | Database 메뉴를 클릭 하면 생성된 데이터 베이스 클러스터를 볼 수 있습니다. 최초에는 데이터가 없음으로 클러스터 메뉴 버튼을 "..."을 클릭 하면 추가 메뉴 중 Load Sample Dataset 을 선택 합니다. 54 | 생성이 완료된 후 Browse Collections를 클릭하먄 데이터를 볼 수 있습니다. 55 | 생성된 데이터 베이스는 sample_airbnb외 8개의 데이터베이스가 생성 되고 최소 1개 이상의 컬렉션(테이블)이 생성되게 됩니다. 56 | 57 | 58 | 59 | #### 기타 필요한 소프트웨어 60 | 클라이언트 애플리케이션 테스트를 위한 Nodejs 필요합니다. 61 | MongoDB에 접속하고 데이터를 조회 하는 GUI Tool (Compass)를 다운로드 합니다. 
62 | 63 | Nodejs : 64 | https://nodejs.org/en/download/ 65 | 66 | Compass : 67 | https://www.mongodb.com/products/compass 68 | 69 | Mongosh : 70 | https://www.mongodb.com/docs/mongodb-shell/install/ 71 | 72 | -------------------------------------------------------------------------------- /00.pre-work/images/images01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images01.png -------------------------------------------------------------------------------- /00.pre-work/images/images02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images02.png -------------------------------------------------------------------------------- /00.pre-work/images/images03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images03.png -------------------------------------------------------------------------------- /00.pre-work/images/images04.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images04.png -------------------------------------------------------------------------------- /00.pre-work/images/images05.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images05.png -------------------------------------------------------------------------------- 
/00.pre-work/images/images06.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images06.png -------------------------------------------------------------------------------- /00.pre-work/images/images07.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images07.png -------------------------------------------------------------------------------- /00.pre-work/images/images08.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/00.pre-work/images/images08.png -------------------------------------------------------------------------------- /01.Provision and CRUD/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/.DS_Store -------------------------------------------------------------------------------- /01.Provision and CRUD/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | # MongoDB Atlas Hands-on Training 5 | 6 | ### [→ Cluster Provision](#Provision) 7 | 8 | ### [→ CRUD with Nodejs](#CRUD) 9 | 10 | ### [→ CRUD with Mongosh](#MONGOSH) 11 | 12 | ### [→ Compass 를 이용한 데이터 확인](#Compass) 13 | 14 | ### [→ 추가 Query](#option) 15 | 16 |
17 | 18 | ### Provision 19 | Pre-Work에 나온 바와 같이 Atlas database를 배포 하여 줍니다. 20 | - [Prew-Work](/00.pre-work/README.md) 21 | 22 | 23 | ### CRUD 24 | 25 | Nodejs로 Atlas 에 접속 하고 MongoDB Query 를 이용하여 데이터를 생성, 조회, 삭제를 테스트 합니다. 26 | 코드는 application 폴더에 있으며 실행을 위해서는 NodeJS를 설치하고 테스트를 위해 관련 패키지를 설치 하여 줍니다. 27 | 28 | ```` 29 | % npm install 30 | 31 | added 196 packages, and audited 197 packages in 2s 32 | 33 | 14 packages are looking for funding 34 | run `npm fund` for details 35 | 36 | found 0 vulnerabilities 37 | ```` 38 | node_modules 폴더가 생성되어 관련된 라이브러리가 설치 됩니다. 39 | 40 | 41 | #### Connection Test 42 | 43 | MongoDB Atlas 와 연결을 위한 테스트 입니다. 44 | MongoDB atlas Console에 접근 주소를 얻어야 합니다. 45 | 접속 주소를 얻기 위해 Console에 로그인합니다. 46 | 데이터베이스 클러스터의 Connect 버튼을 클릭 합니다. 47 | 48 | 49 | 50 | 51 | 접근방법을 선택 하여 주는 단계에서 Connect your application를 선택 하면 접근 주소를 얻을 수 있습니다. 52 | 53 | 54 | 55 | Driver는 Node.js를 선택 하고 버젼은 4.1 or later를 선택 하여 주면 연결을 위한 Connection String이 생성 됩니다. 56 | 57 | 58 | 59 | 60 | connect.js 에 const uri을 수정 하여 줍니다. 생성한 Database Account 와 비밀 번호를 수정하여 줍니다. 만약 비밀번호에 특수문자가 포함되어있는 경우 ('@','#', '!' 등) HTML URI encoding이 필요합니다. 61 | (https://www.w3schools.com/tags/ref_urlencode.ASP) 62 | 63 | ```` 64 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 65 | ```` 66 | 연결 테스트를 위해 다음을 실행 합니다. 67 | 68 | ```` 69 | % node connect.js 70 | Connected successfully to server 71 | ```` 72 | 73 | #### Insert Test 74 | 75 | MongoDB Atlas 와 연결하여 데이터를 생성 합니다. 76 | insertOne.js 에 const uri을 수정 하여 줍니다. 77 | 78 | ```` 79 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 80 | ```` 81 | 입력할 데이터를 수정 하여 줍니다. 
82 | 83 | ```` 84 | const newUser = <>; // query를 수정 85 | 86 | const newUser = { 87 | ssn:"123-456-0001", 88 | email:"user@email.com", 89 | name:"Gildong Hong", 90 | DateOfBirth: "1st Jan.", 91 | Hobbies:["Martial arts"], 92 | Addresses:[{"Address Name":"Work","Street":"431, Teheran-ro GangNam-gu ","City":"Seoul", "Zip":"06159"}], 93 | Phones:[{"type":"mobile","number":"010-5555-1234"}] 94 | }; 95 | ```` 96 | 97 | 입력 테스트를 위해 다음을 실행 합니다. 98 | 99 | ```` 100 | % node insertOne.js 101 | A document was inserted with the _id: 63bba1f8e554c42df82f974e 102 | ```` 103 | Atlas Console 에서 데이터 생성 여부를 확인 합니다. 104 | 105 | 106 | #### find Test 107 | 108 | MongoDB Atlas 와 연결하여 데이터를 조회 합니다. 109 | findeOne.js 에 const uri을 수정 하여 줍니다. 110 | 111 | ```` 112 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 113 | ```` 114 | 입력할 데이터를 수정 하여 줍니다. 115 | 조회할 데이터의 ssn을 확인 합니다. 116 | 117 | ````` 118 | const query = {ssn:"123-456-0001"}; 119 | ````` 120 | 121 | 데이터를 조회 합니다 122 | ```` 123 | % node findOne.js 124 | Find One Record: 63bba1f8e554c42df82f974e 125 | Find One Record by SSN: 63bba1f8e554c42df82f974e 126 | ```` 127 | 128 | #### Update Test 129 | 130 | MongoDB Atlas 와 연결하여 데이터를 업데이트 합니다. 131 | updateOne.js 에 const uri을 수정 하여 줍니다. 132 | 133 | 134 | ```` 135 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 136 | ```` 137 | 수정할 데이터를 ssn을 입력 하여 줍니다. 138 | 수정 대상 데이터의 ssn 및 수정할 데이터 항목을 확인 수정 하여 줍니다. 139 | ````` 140 | const query = {"ssn":"123-456-0001"}; 141 | const updatedata ={$set:{email:"gildong@email.com"}}; 142 | 143 | const result = await userCollection.updateOne(query, updatedata); 144 | 145 | ````` 146 | 147 | 데이터를 수정 합니다 148 | ```` 149 | % node updateOne.js 150 | 1 document(s) matched the filter, updated 0 document(s) 151 | ```` 152 | 153 | #### Update Hobbies Test 154 | 155 | 156 | MongoDB Atlas 와 연결하여 데이터를 업데이트 합니다. 
157 | updateHobbies.js 에 const uri을 수정 하여 줍니다. 158 | 159 | ```` 160 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 161 | ```` 162 | 수정할 데이터를 ssn을 입력 하여 줍니다. 163 | 수정 대상 데이터의 ssn 및 Hobby 항목을 추가 하여 줍니다. (취미로 Reading 추가 하기) 164 | ````` 165 | 166 | const query = {"ssn":"123-456-0001"}; 167 | const updatedata ={$push:{Hobbies:"Reading"}}; 168 | 169 | const result = await userCollection.updateOne(query, updatedata); 170 | 171 | ````` 172 | 173 | 데이터를 수정 합니다 174 | ```` 175 | node updateHobbies.js 176 | 1 document(s) matched the filter, updated 1 document(s) 177 | ```` 178 | Atlas Data Console에서 데이터가 수정 된 것을 확인 합니다. 179 | 180 | 181 | #### Remove Test 182 | 183 | 184 | MongoDB Atlas 와 연결하여 데이터를 삭제 합니다. 185 | removeUser.js 에 const uri을 수정 하여 줍니다. 186 | 187 | ```` 188 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 189 | ```` 190 | 삭제할 데이터를 수정 하여 줍니다. 191 | 삭제할 데이터의 ssn 및 입력 하여줍니다. 192 | ````` 193 | 194 | const qeury = {"ssn":"123-456-0001"}; 195 | 196 | const result = await userCollection.deleteOne(qeury); 197 | 198 | ````` 199 | 200 | 데이터를 삭제 합니다 201 | ```` 202 | % node removeUser.js 203 | 1 document(s) removed 204 | ```` 205 | 206 | 207 | ### MONGOSH 208 | 209 | Mongosh로 Atlas 에 접속 하고 MongoDB Query 를 이용하여 데이터를 생성, 조회, 삭제를 테스트 합니다. NodeJS에 익숙하지 않은 경우 이를 이용하여 테스트 합니다. 210 | 211 | 212 | #### Connection 213 | 214 | MongoDB Atlas 와 Mongosh을 이용하여 연결 합니다. 215 | MongoDB atlas Mongosh 접근 주소를 얻어야 합니다. 216 | 접속 주소를 얻기 위해 Console에 로그인합니다. 217 | 데이터베이스 클러스터의 Connect 버튼을 클릭 합니다. 218 | 219 | 220 | 221 | 222 | 접근방법을 선택 하여 주는 단계에서 Shell을 선택 하면 접근 주소를 얻을 수 있습니다. 223 | 224 | 225 | 226 | Mongosh이 설치 되어 있음으로 I have the MongoDB Shell installed를 선택하고 계정 접근은 암호로 접근할 것임으로 Password를 선택하면 접근 할 수 있는 주소를 얻을 수 있습니다. 227 | 228 | 229 | 230 | 231 | Terminal을 열고 해당 주소를 이용하여 mongosh를 실행 하여 줍니다. (접근하기 위한 Account로 입력 하여 줍니다.) 
232 | 233 | ```` 234 | % mongosh "mongodb+srv://cluster0.5qjlg.mongodb.net/myFirstDatabase" --apiVersion 1 --username admin 235 | Enter password: ********** 236 | Current Mongosh Log ID: 64454459813babb209a83f4c 237 | Connecting to: mongodb+srv://cluster0.5qjlg.mongodb.net/myFirstDatabase 238 | Using MongoDB: 6.0.5 (API Version 1) 239 | Using Mongosh: 1.0.5 240 | 241 | For mongosh info see: https://docs.mongodb.com/mongodb-shell/ 242 | 243 | Atlas atlas-t0pzlo-shard-0 [primary] myFirstDatabase> 244 | ```` 245 | 246 | #### Insert Test 247 | 248 | Mongosh을 이용하여 Atlas와 연결하여 데이터를 생성 합니다. 249 | 250 | 먼저 데이터베이스를 선택하여야 합니다. 251 | ```` 252 | Atlas atlas-t0pzlo-shard-0 [primary] myFirstDatabase> use samsungheavy 253 | switched to db samsungheavy 254 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> 255 | ```` 256 | 257 | 입력할 데이터를 생성하여 줍니다. (변수로 newUser를 만들어 줍니다) 258 | 259 | ```` 260 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> let newUser= { 261 | ssn:"123-456-0001", 262 | email:"user@email.com", 263 | name:"Gildong Hong", 264 | DateOfBirth: "1st Jan.", 265 | Hobbies:["Martial arts"], 266 | Addresses:[{"Address Name":"Work","Street":"431, Teheran-ro GangNam-gu ","City":"Seoul", "Zip":"06159"}], 267 | Phones:[{"type":"mobile","number":"010-5555-1234"}] 268 | }; 269 | ```` 270 | 271 | 다음 데이터 베이스 명령으로 데이터를 생성 합니다. 272 | 273 | ```` 274 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.insertOne(newUser) 275 | { 276 | acknowledged: true, 277 | insertedId: ObjectId("64454591813babb209a83f4d") 278 | } 279 | 280 | ```` 281 | Atlas Console 에서 데이터 생성 여부를 확인 합니다. 282 | 283 | 284 | #### find Test 285 | 286 | Mongosh을 이용하여 Atlas와 연결하여 데이터를 조회 합니다. 287 | 288 | 먼저 데이터베이스를 선택하여야 합니다. 
(이미 해당 데이터베이스를 사용 하고 있으면 생략 합니다) 289 | ```` 290 | Atlas atlas-t0pzlo-shard-0 [primary] myFirstDatabase> use samsungheavy 291 | switched to db samsungheavy 292 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> 293 | ```` 294 | 295 | 데이터를 조회 합니다 296 | ```` 297 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.find({ssn:"123-456-0001"}) 298 | [ 299 | { 300 | _id: ObjectId("64454591813babb209a83f4d"), 301 | ssn: '123-456-0001', 302 | email: 'user@email.com', 303 | name: 'Gildong Hong', 304 | DateOfBirth: '1st Jan.', 305 | Hobbies: [ 'Martial arts' ], 306 | Addresses: [ 307 | { 308 | 'Address Name': 'Work', 309 | Street: '431, Teheran-ro GangNam-gu ', 310 | City: 'Seoul', 311 | Zip: '06159' 312 | } 313 | ], 314 | Phones: [ { type: 'mobile', number: '010-5555-1234' } ] 315 | } 316 | ] 317 | ```` 318 | 319 | #### Update Test 320 | 321 | Mongosh을 이용하여 Atlas와 연결하여 데이터를 업데이트 합니다. 322 | 323 | 먼저 데이터베이스를 선택하여야 합니다. (이미 해당 데이터베이스를 사용 하고 있으면 생략 합니다) 324 | ```` 325 | Atlas atlas-t0pzlo-shard-0 [primary] myFirstDatabase> use samsungheavy 326 | switched to db samsungheavy 327 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> 328 | ```` 329 | 330 | 수정할 데이터를 ssn을 입력 하여 줍니다. 331 | 수정 대상 데이터의 ssn 및 수정할 데이터 항목을 확인 수정 하여 줍니다. 332 | ````` 333 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> let query = {"ssn":"123-456-0001"} 334 | 335 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> let updatedata = { $set: { email: "gildong@email.com" } } 336 | 337 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.updateOne(query,updatedata) 338 | { 339 | acknowledged: true, 340 | insertedId: null, 341 | matchedCount: 1, 342 | modifiedCount: 1, 343 | upsertedCount: 0 344 | } 345 | 346 | ````` 347 | 348 | 데이터를 수정 결과를 확인 합니다. 
(이메일 주소가 수정 된 것을 확인 합니다) 349 | ```` 350 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.find({"ssn":"123-456-0001"}) 351 | [ 352 | { 353 | _id: ObjectId("64454591813babb209a83f4d"), 354 | ssn: '123-456-0001', 355 | email: 'gildong@email.com', 356 | name: 'Gildong Hong', 357 | DateOfBirth: '1st Jan.', 358 | Hobbies: [ 'Martial arts' ], 359 | Addresses: [ 360 | { 361 | 'Address Name': 'Work', 362 | Street: '431, Teheran-ro GangNam-gu ', 363 | City: 'Seoul', 364 | Zip: '06159' 365 | } 366 | ], 367 | Phones: [ { type: 'mobile', number: '010-5555-1234' } ] 368 | } 369 | ] 370 | ```` 371 | 372 | #### Update Hobbies Test 373 | 374 | Mongosh을 이용하여 Atlas와 연결하여 데이터를 업데이트 (Hobbies를 추가)합니다. 375 | 376 | 먼저 데이터베이스를 선택하여야 합니다. (이미 해당 데이터베이스를 사용 하고 있으면 생략 합니다) 377 | ```` 378 | Atlas atlas-t0pzlo-shard-0 [primary] myFirstDatabase> use samsungheavy 379 | switched to db samsungheavy 380 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> 381 | ```` 382 | 383 | 수정할 데이터를 ssn을 입력 하여 줍니다. 384 | 수정 대상 데이터의 ssn 및 Hobby 항목을 추가 하여 줍니다. (취미로 Reading 추가 하기) 385 | ````` 386 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> let query = {"ssn":"123-456-0001"} 387 | 388 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> let updatedata ={$push:{Hobbies:"Reading"}} 389 | 390 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.updateOne(query,updatedata) 391 | { 392 | acknowledged: true, 393 | insertedId: null, 394 | matchedCount: 1, 395 | modifiedCount: 1, 396 | upsertedCount: 0 397 | } 398 | 399 | ````` 400 | 401 | 데이터를 수정 결과를 확인 합니다. 
(Hobby에 Reading이 추가되어 있음) 402 | ```` 403 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.find({"ssn":"123-456-0001"}) 404 | [ 405 | { 406 | _id: ObjectId("64454591813babb209a83f4d"), 407 | ssn: '123-456-0001', 408 | email: 'gildong@email.com', 409 | name: 'Gildong Hong', 410 | DateOfBirth: '1st Jan.', 411 | Hobbies: [ 'Martial arts', 'Reading' ], 412 | Addresses: [ 413 | { 414 | 'Address Name': 'Work', 415 | Street: '431, Teheran-ro GangNam-gu ', 416 | City: 'Seoul', 417 | Zip: '06159' 418 | } 419 | ], 420 | Phones: [ { type: 'mobile', number: '010-5555-1234' } ] 421 | } 422 | ] 423 | 424 | ```` 425 | 426 | #### Remove Test 427 | 428 | Mongosh을 이용하여 Atlas와 연결하여 데이터를 삭제 합니다. 429 | 430 | 먼저 데이터베이스를 선택하여야 합니다. (이미 해당 데이터베이스를 사용 하고 있으면 생략 합니다) 431 | ```` 432 | Atlas atlas-t0pzlo-shard-0 [primary] myFirstDatabase> use samsungheavy 433 | switched to db samsungheavy 434 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> 435 | ```` 436 | 437 | 삭제할 데이터를 수정 하여 줍니다. 438 | 삭제할 데이터의 ssn 및 입력 하여줍니다. 439 | ````` 440 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.deleteOne({ssn:"123-456-0001"}) 441 | { acknowledged: true, deletedCount: 1 } 442 | 443 | ````` 444 | 445 | 데이터를 확인 합니다. 446 | ```` 447 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> db.handson.findOne({ssn:"123-456-0001"}) 448 | null 449 | Atlas atlas-t0pzlo-shard-0 [primary] samsungheavy> 450 | ```` 451 | 452 | 453 | 454 | ### Compass 455 | 456 | MongoDB Cluster에 접속하여 저장된 데이터 등을 볼 수 있는 개발자용 GUI툴입니다. 이를 이용하여 데이터를 조회 하고 변경 하여 줍니다. 다음 링크에서 다운로드가 가능 합니다. 457 | Compass : 458 | https://www.mongodb.com/products/compass 459 | 460 | 테스트를 위해 다음 방법으로 데이터를 생성 하여 줍니다. 461 | ```` 462 | % node insertMany.js 463 | A document was inserted with the _id: 63e32381541c67cc69d78977 464 | A document was inserted with the _id: 63e32381541c67cc69d78978 465 | A document was inserted with the _id: 63e32381541c67cc69d78979 466 | A document was inserted with the _id: 63e32381541c67cc69d7897a 467 | ... 
468 | ```` 469 | 470 | 데이터가 100건이 생성이 되게 됩니다. 471 | 472 | 473 | #### Connection 474 | MongoDB atlas Console에 접근 주소를 얻어야 합니다. 475 | 접속 주소를 얻기 위해 Console에 로그인합니다. 476 | 데이터베이스 클러스터의 Connect 버튼을 클릭 합니다. 477 | 478 | 479 | 480 | 접근방법을 선택 하여 주는 단계에서 Connect using MongoDB Compass를 선택 하면 접근 주소를 얻을 수 있습니다. 481 | 482 | 483 | 484 | Connection String을 복사하여 줍니다. 이후 Compass를 실행 하여 줍니다. 485 | 486 | 487 | 488 | 489 | 복사한 Connection String을 입력하여 줍니다. 490 | 491 | 492 | 493 | 494 | #### 데이터 조회 495 | 데이터베이스에서 생성한 handson 탭을 클릭 하면 컬렉션 리스트를 볼 수 있습니다. 생성한 user컬렉션을 선택 합니다. 496 | 497 | 498 | 499 | 데이터 검색을 위해서 Filter 부분에 검색 조건을 입력 하여 줍니다. 500 | ssn 이 123-456-0001 인 데이터를 찾기 위해 다음과 같이 입력 하여 줍니다. 501 | 502 | ```` 503 | {ssn: "123-456-0001"} 504 | ```` 505 | 506 | 507 | 508 | 나이(age)가 10 이상 40이하인 사람을 찾기를 합니다. 조건은 age >= 10 이고 age <=40으로 합니다. 509 | 510 | ```` 511 | {age: {$gte: 10, $lte: 40}} 512 | ```` 513 | 514 | 515 | 516 | 517 | 518 | ### option 519 | 생성된 데이터 베이스중 Movie 관련 데이터 컬렉션 (sample_mflix.movies)에서 다음 내용을 Query 합니다. 520 | 521 | - 1987 년에 나온 데이터 조회 (Where year = 1987) 522 | 523 | - 장르가 Comedy 에 속하는 영화 검색 524 | 525 | - 장르가 Comedy 하나 만 있는 데이터 검색 526 | 527 | - 장르가 Comedy 혹은 Drama 인 데이터 검색 528 | 529 | - imdb 의 평가 점수가 8.0 이상이고 등급이 PG 인 영화 검색 530 | 531 | - metacritic의 평점이 존재 하는 영화 검색 532 | 533 | - Dr. Strangelove 로 시작하는 영화 검색 534 | 535 | 해당 쿼리는 다음과 같습니다. 
536 | - 1987 년에 나온 데이터 조회 (Where year = 1987) 537 | ```` 538 | db.movies.find({year:1987}) 539 | ```` 540 | 541 | 542 | - 장르가 Comedy 에 속하는 영화 검색 543 | ```` 544 | db.movies.find({genres: "Comedy"}) 545 | 546 | ```` 547 | 548 | 549 | - 장르가 Comedy 하나 만 있는 데이터 검색 550 | ```` 551 | db.movies.find({genres:["Comedy"]}) 552 | 553 | ```` 554 | 555 | 556 | - 장르가 Comedy 혹은 Drama 인 데이터 검색 557 | ```` 558 | db.movies.find({genres:{$in:["Comedy", "Drama"]}}) 559 | 560 | ```` 561 | 562 | 563 | - imdb 의 평가 점수가 8.0 이상이고 등급이 PG 인 영화 검색 564 | ```` 565 | db.movies.find({"imdb.rating" : {$gt: 8.0}, rated:"PG"}) 566 | 567 | ```` 568 | 569 | 570 | - metacritic의 평점이 존재 하는 영화 검색 571 | ```` 572 | db.movies.find({metacritic: {$exists: true}}) 573 | 574 | ```` 575 | 576 | 577 | - Dr. Strangelove 로 시작하는 영화 검색 578 | ```` 579 | db.movies.find({title: {$regex: '^Dr. Strangelove'}}) 580 | 581 | ```` 582 | 583 | -------------------------------------------------------------------------------- /01.Provision and CRUD/application/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | 
node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | 106 | .DS_Store 107 | -------------------------------------------------------------------------------- /01.Provision and CRUD/application/connect.js: -------------------------------------------------------------------------------- 1 | 2 | const { MongoClient, ServerApiVersion } = require('mongodb'); 3 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 4 | 5 | const client = new MongoClient(uri, { useNewUrlParser: true, useUnifiedTopology: true, serverApi: ServerApiVersion.v1 }); 6 | client.connect(err => { 7 | const collection = client.db("test").collection("devices"); 8 | console.log("Connected successfully to server"); 9 | client.close(); 10 | }); 11 | -------------------------------------------------------------------------------- 
/01.Provision and CRUD/application/findOne.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("handson"); 10 | const userCollection = database.collection("user"); 11 | 12 | const result = await userCollection.findOne(); 13 | console.log(`Find One Record: ${result._id}`); 14 | 15 | const query = {}; 16 | 17 | const result2 = await userCollection.findOne(query); 18 | console.log(`Find One Record by SSN: ${result2._id}`); 19 | 20 | } finally { 21 | await client.close(); 22 | } 23 | } 24 | 25 | run().catch(console.dir); -------------------------------------------------------------------------------- /01.Provision and CRUD/application/insertMany.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("handson"); 10 | const userCollection = database.collection("user"); 11 | // create a document to insert 12 | 13 | 14 | for (let i=0; i <100; i++) 15 | { 16 | const newUser = { 17 | ssn:"123-456-000"+i, 18 | email:"user"+i+"@email.com", 19 | name:"Gildong Hong "+i, 20 | age: Math.floor(Math.random()*100), 21 | DateOfBirth: "1st Jan.", 22 | Hobbies:["Martial arts"], 23 | Addresses:[{"Address Name":"Work","Street":"431, Teheran-ro GangNam-gu ","City":"Seoul", "Zip":"06159"}], 24 | Phones:[{"type":"mobile","number":"010-5555-1234"}] 25 | }; 26 | 27 | const result = await userCollection.insertOne(newUser); 28 | console.log(`A document was inserted with the _id: ${result.insertedId}`); 
29 | } 30 | 31 | } finally { 32 | await client.close(); 33 | } 34 | } 35 | 36 | run().catch(console.dir); -------------------------------------------------------------------------------- /01.Provision and CRUD/application/insertOne.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("handson"); 10 | const userCollection = database.collection("user"); 11 | // create a document to insert 12 | const newUser = {name:"Kyudong"}; 13 | 14 | const result = await userCollection.insertOne(newUser); 15 | console.log(`A document was inserted with the _id: ${result.insertedId}`); 16 | } finally { 17 | await client.close(); 18 | } 19 | } 20 | 21 | run().catch(console.dir); -------------------------------------------------------------------------------- /01.Provision and CRUD/application/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node2", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "nodemon index" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "cors": "^2.8.5", 13 | "dotenv": "^10.0.0", 14 | "express": "^4.17.1", 15 | "mongodb": "^4.1.2", 16 | "morgan": "^1.10.0", 17 | "nunjucks": "^3.2.3" 18 | }, 19 | "devDependencies": { 20 | "nodemon": "^2.0.15" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /01.Provision and CRUD/application/removeUser.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 
| const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("handson"); 10 | const userCollection = database.collection("user"); 11 | 12 | const qeury = <>; 13 | 14 | const result = await userCollection.deleteOne(qeury); 15 | 16 | console.log( 17 | `${result.deletedCount} document(s) removed`, 18 | ); 19 | 20 | 21 | } finally { 22 | await client.close(); 23 | } 24 | } 25 | 26 | run().catch(console.dir); -------------------------------------------------------------------------------- /01.Provision and CRUD/application/updateHobbies.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("handson"); 10 | const userCollection = database.collection("user"); 11 | 12 | const query = <>; 13 | const updatedata = <>; 14 | 15 | const result = await userCollection.updateOne(query,updatedata); 16 | 17 | console.log( 18 | `${result.matchedCount} document(s) matched the filter, updated ${result.modifiedCount} document(s)`, 19 | ); 20 | 21 | 22 | } finally { 23 | await client.close(); 24 | } 25 | } 26 | 27 | run().catch(console.dir); -------------------------------------------------------------------------------- /01.Provision and CRUD/application/updateOne.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("handson"); 10 | const userCollection = database.collection("user"); 11 | 12 | const query = <>; 13 | const updatedata = <>; 14 
const result = await userCollection.updateOne(query,updatedata);
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image04.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image05.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image05.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image06.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image06.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image07.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image07.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image08.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image08.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image09.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image09.png 
-------------------------------------------------------------------------------- /01.Provision and CRUD/images/image10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image10.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image11.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image12.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image13.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image14.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image15.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image15.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image16.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image17.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image17.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image20.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image20.png -------------------------------------------------------------------------------- /01.Provision and CRUD/images/image21.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/01.Provision and CRUD/images/image21.png -------------------------------------------------------------------------------- /02.Document Model/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | # MongoDB Atlas Hands-on Training 5 | 6 | ### [→ Bucket Pattern](#bucket) 7 | 8 | ### [→ Computed Pattern](#computed) 9 | 10 | ### [→ Versioing Pattern](#versioning) 11 | 12 | 13 |
14 | 15 | ### bucket 16 | Nodejs로 Atlas 에 접속 하고 MongoDB Query 를 이용하여 데이터를 생성 테스트 합니다. 17 | 코드는 application 폴더에 있으며 실행을 위해서는 NodeJS를 설치하고 테스트를 위해 관련 패키지를 설치 하여 줍니다. 18 | 19 | ```` 20 | % npm install 21 | 22 | added 196 packages, and audited 197 packages in 2s 23 | 24 | 14 packages are looking for funding 25 | run `npm fund` for details 26 | 27 | found 0 vulnerabilities 28 | ```` 29 | node_modules 폴더가 생성되어 관련된 라이브러리가 설치 됩니다. 30 | 31 | Atlas 와 연결 정보를 맞게 수정 하여 줍니다. 32 | 33 | ```` 34 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 35 | ```` 36 | 37 | application 폴더에서 bucket 스크립트를 실행 하여 데이터를 생성 합니다. 38 | 39 | ```` 40 | node bucket.js 41 | 42 | ```` 43 | 44 | 데이터를 생성하는 Query는 다음과 같습니다. 45 | 46 | ```` 47 | const sensor_id = "12345"; 48 | const bucket_range = 5; 49 | 50 | 51 | const device = {sensor_id:sensor_id, transaction_count: {$lt: bucket_range}}; 52 | 53 | 54 | for (let i=0; i <100; i++) 55 | { 56 | let temp= getRandomTemperature (30); 57 | let now = new Date(); 58 | const updateQuery = { 59 | $setOnInsert: {sensor_id : sensor_id, start_date:now.toISOString()}, 60 | $push: {measurements: {temperature: temp, timestamp: now.toISOString()}}, 61 | $inc: {transaction_count:1, sum_temperature: temp} 62 | }; 63 | 64 | const result = await userCollection.updateOne(device, updateQuery, {upsert:true}); 65 | } 66 | ```` 67 | 조회 조건으로 sensor_id 와 transaction_count 등으로 기존에 값이 존재 하는지를 확인 하고 문서가 없을 경우는 데이터를 새로 생성 하도록 upsert=true 조건을 이용합니다. 데이터는 Insert 시 생성되는 데이터와 기존 데이터를 update 하는 데이터를 구분하여 처리 하여 줍니다. 68 | 69 | 70 | ### computed 71 | 72 | Nodejs로 Atlas 에 접속 하고 MongoDB Query 를 이용하여 데이터를 생성 테스트 합니다. 73 | 코드는 application 폴더에 있으며 실행을 위해서는 NodeJS를 설치하고 테스트를 위해 관련 패키지를 설치 하여 줍니다. 
74 | 75 | ```` 76 | % npm install 77 | 78 | added 196 packages, and audited 197 packages in 2s 79 | 80 | 14 packages are looking for funding 81 | run `npm fund` for details 82 | 83 | found 0 vulnerabilities 84 | ```` 85 | node_modules 폴더가 생성되어 관련된 라이브러리가 설치 됩니다. 86 | 87 | Atlas 와 연결 정보를 맞게 수정 하여 줍니다. 88 | 89 | ```` 90 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 91 | ```` 92 | 93 | application 폴더에서 computed 스크립트를 실행 하여 데이터를 생성 합니다. 94 | 95 | ```` 96 | node computed.js 97 | 98 | ```` 99 | 100 | 데이터를 생성하는 Query는 다음과 같습니다. 101 | 102 | ```` 103 | const sensor_id = "12345"; 104 | const bucket_range = 60; 105 | 106 | const device = {sensor_id:sensor_id, txCount: {$lt: bucket_range}}; 107 | 108 | for (let i=0; i <100; i++) 109 | { 110 | let temp= getRandomTemperature (30); 111 | let mois = getRandomTemperature (50); 112 | let now = new Date(); 113 | const updateQuery = { 114 | $setOnInsert: {sensor_id : sensor_id, start_date:now.toISOString()}, 115 | $push: {measurements: {temperature: temp, moisture: mois ,timestamp: now.toISOString()}}, 116 | $inc: {txCount:1, sum_temp: temp, sum_moisture: mois} 117 | }; 118 | 119 | const result = await computedCollection.updateOne(device, updateQuery, {upsert:true}); 120 | } 121 | 122 | ```` 123 | 데이터 생성시 $inc 를 이용하여 기존 값에 데이터를 추가 해주는 방식으로 사전에 데이터를 계산 하여 줍니다. 124 | 125 | 126 | ### versioning 127 | 128 | 129 | Nodejs로 Atlas 에 접속 하고 MongoDB Query 를 이용하여 데이터를 생성 테스트 합니다. 130 | 코드는 application 폴더에 있으며 실행을 위해서는 NodeJS를 설치하고 테스트를 위해 관련 패키지를 설치 하여 줍니다. 131 | 132 | ```` 133 | % npm install 134 | 135 | added 196 packages, and audited 197 packages in 2s 136 | 137 | 14 packages are looking for funding 138 | run `npm fund` for details 139 | 140 | found 0 vulnerabilities 141 | ```` 142 | node_modules 폴더가 생성되어 관련된 라이브러리가 설치 됩니다. 143 | 144 | Atlas 와 연결 정보를 맞게 수정 하여 줍니다. 
145 | 146 | ```` 147 | const uri =mongodb+srv://atlas-account:@cluster0.****.mongodb.net/myFirstDatabase?retryWrites=true&w=majority 148 | ```` 149 | 150 | application 폴더에서 version 스크립트를 실행 하여 데이터를 생성 합니다. 151 | 152 | ```` 153 | node version1.js 154 | 155 | ```` 156 | 157 | 데이터를 생성하는 Query는 다음과 같습니다. 158 | 159 | 160 | ```` 161 | for (let i=0; i <100; i++) 162 | { 163 | const newUser = { 164 | schema_version: "1.0", 165 | ssn:"123-456-000"+i, 166 | email:"user"+i+"@email.com", 167 | name:"Gildong Hong "+i, 168 | age: Math.floor(Math.random()*100), 169 | DateOfBirth: "1st Jan.", 170 | Addresses:[{"Address Name":"Work","Street":"431, Teheran-ro GangNam-gu ","City":"Seoul", "Zip":"06159"}], 171 | Phones:[{"type":"mobile","number":"010-5555-1234"}] 172 | }; 173 | 174 | const result = await versionCollection.insertOne(newUser); 175 | console.log(`A document was inserted with the _id: ${result.insertedId}`); 176 | } 177 | 178 | ```` 179 | 180 | 데이터 스키마가 변경되는 경우 version을 지정 하여 데이터를 저장 합니다. 181 | 182 | ```` 183 | 184 | for (let i=0; i <100; i++) 185 | { 186 | const newUser = { 187 | schema_version: "2.0", 188 | ssn:"123-456-000"+i, 189 | email:"user"+i+"@email.com", 190 | name:"Gildong Hong "+i, 191 | age: Math.floor(Math.random()*100), 192 | DateOfBirth: "1st Jan.", 193 | Hobbies:["Martial arts"], 194 | Addresses:[{"Address Name":"Work","Street":"431, Teheran-ro GangNam-gu ","City":"Seoul", "Zip":"06159"}], 195 | Phones:[{"type":"mobile","number":"010-5555-1234"}] 196 | }; 197 | 198 | const result = await versionCollection.insertOne(newUser); 199 | console.log(`A document was inserted with the _id: ${result.insertedId}`); 200 | } 201 | 202 | ```` 203 | 204 | 데이터 조회 혹은 기존 데이터를 새로운 스키마로 저장 하는 경우 schema_version 정보를 이용하여 저장합니다. 
-------------------------------------------------------------------------------- /02.Document Model/application/bucket.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | function getRandomTemperature(max) { 7 | return Math.floor(Math.random() * max); 8 | } 9 | 10 | async function run() { 11 | try { 12 | const database = client.db("modeling"); 13 | const userCollection = database.collection("bucket"); 14 | // create a document to insert 15 | const sensor_id = "12345"; 16 | const bucket_range = 5; 17 | 18 | 19 | const device = {sensor_id:sensor_id, transaction_count: {$lt: bucket_range}}; 20 | 21 | 22 | for (let i=0; i <100; i++) 23 | { 24 | let temp= getRandomTemperature (30); 25 | let now = new Date(); 26 | const updateQuery = { 27 | $setOnInsert: {sensor_id : sensor_id, start_date:now.toISOString()}, 28 | $push: {measurements: {temperature: temp, timestamp: now.toISOString()}}, 29 | $inc: {transaction_count:1, sum_temperature: temp} 30 | }; 31 | 32 | const result = await userCollection.updateOne(device, updateQuery, {upsert:true}); 33 | } 34 | 35 | } finally { 36 | await client.close(); 37 | } 38 | } 39 | 40 | run().catch(console.dir); -------------------------------------------------------------------------------- /02.Document Model/application/computed.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | function getRandomTemperature(max) { 7 | return Math.floor(Math.random() * max); 8 | } 9 | 10 | async function run() { 11 | try { 12 | const database = client.db("modeling"); 13 | const 
computedCollection = database.collection("computed"); 14 | // create a document to insert 15 | const sensor_id = "12345"; 16 | const bucket_range = 60; 17 | 18 | 19 | const device = {sensor_id:sensor_id, txCount: {$lt: bucket_range}}; 20 | 21 | 22 | for (let i=0; i <100; i++) 23 | { 24 | let temp= getRandomTemperature (30); 25 | let mois = getRandomTemperature (50); 26 | let now = new Date(); 27 | const updateQuery = { 28 | $setOnInsert: {sensor_id : sensor_id, start_date:now.toISOString()}, 29 | $push: {measurements: {temperature: temp, moisture: mois ,timestamp: now.toISOString()}}, 30 | $inc: {txCount:1, sum_temp: temp, sum_moisture: mois} 31 | }; 32 | 33 | const result = await computedCollection.updateOne(device, updateQuery, {upsert:true}); 34 | } 35 | 36 | } finally { 37 | await client.close(); 38 | } 39 | } 40 | 41 | run().catch(console.dir); -------------------------------------------------------------------------------- /02.Document Model/application/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node2", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "nodemon index" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "cors": "^2.8.5", 13 | "dotenv": "^10.0.0", 14 | "express": "^4.17.1", 15 | "mongodb": "^4.1.2", 16 | "morgan": "^1.10.0", 17 | "nunjucks": "^3.2.3" 18 | }, 19 | "devDependencies": { 20 | "nodemon": "^2.0.15" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /02.Document Model/application/version1.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = 
client.db("modeling"); 10 | const versionCollection = database.collection("versioning"); 11 | 12 | 13 | for (let i=0; i <100; i++) 14 | { 15 | const newUser = { 16 | schema_version: "1.0", 17 | ssn:"123-456-000"+i, 18 | email:"user"+i+"@email.com", 19 | name:"Gildong Hong "+i, 20 | age: Math.floor(Math.random()*100), 21 | DateOfBirth: "1st Jan.", 22 | Addresses:[{"Address Name":"Work","Street":"431, Teheran-ro GangNam-gu ","City":"Seoul", "Zip":"06159"}], 23 | Phones:[{"type":"mobile","number":"010-5555-1234"}] 24 | }; 25 | 26 | const result = await versionCollection.insertOne(newUser); 27 | console.log(`A document was inserted with the _id: ${result.insertedId}`); 28 | } 29 | 30 | } finally { 31 | await client.close(); 32 | } 33 | } 34 | 35 | run().catch(console.dir); -------------------------------------------------------------------------------- /02.Document Model/application/version2.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("modeling"); 10 | const versionCollection = database.collection("versioning"); 11 | 12 | 13 | for (let i=0; i <100; i++) 14 | { 15 | const newUser = { 16 | schema_version: "2.0", 17 | ssn:"123-456-000"+i, 18 | email:"user"+i+"@email.com", 19 | name:"Gildong Hong "+i, 20 | age: Math.floor(Math.random()*100), 21 | DateOfBirth: "1st Jan.", 22 | Hobbies:["Martial arts"], 23 | Addresses:[{"Address Name":"Work","Street":"431, Teheran-ro GangNam-gu ","City":"Seoul", "Zip":"06159"}], 24 | Phones:[{"type":"mobile","number":"010-5555-1234"}] 25 | }; 26 | 27 | const result = await versionCollection.insertOne(newUser); 28 | console.log(`A document was inserted with the _id: ${result.insertedId}`); 29 | } 30 | 31 | } finally { 32 | await 
client.close(); 33 | } 34 | } 35 | 36 | run().catch(console.dir); -------------------------------------------------------------------------------- /03.index and aggregation/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/.DS_Store -------------------------------------------------------------------------------- /03.index and aggregation/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | # MongoDB Atlas Hands-on Training 5 | 6 | ## Index and Aggreagation 7 | 생성한 컬렉션에 인덱스를 생성하여 빠른 데이터 엑세스가 되는 것을 확인 합니다. 8 | 9 | ### [→ Index on Movies](#Index) 10 | 11 | ### [→ Aggregation](#Aggregation) 12 | 13 | ### [→ Lookup 을 이용한 조인](#Lookup) 14 | 15 | ### [→ 추가 Aggregation](#option) 16 | 17 |
18 | 19 | 20 | ### Index 21 | 22 | sample_mflix.movies 에서 2000년 이후에 개봉된 영화 중 "Bill Murray"가 출연한 영화 리스트를 검색 하고 제목 순서로 출력 합니다. 23 | 24 | Compass에서 movies 컬렉션을 선택 하고 Explain Plan 에서 실행 합니다. 25 | ```` 26 | db.movies.find( 27 | { 28 | "cast":"Bill Murray", 29 | "year":{$gte:2000} 30 | } 31 | ).sort( 32 | {"title":1} 33 | ) 34 | ```` 35 | 36 | 37 | No index available for this query 로 인덱스가 사용 되지 않은 것을 확인 할 수 있으며 Dcouments Examined의 갯수가 23530으로 전체 문서가 스캔 된 것을 확인 할 수 있습니다. 38 | 또한 Documnets Returned 가 12인 것으로 전체 문서 중 12개 문서가 리턴된 것으로 12개 문서를 찾기 위해 23530 문서를 검색한 것으로 비효율적인 것을 알 수 있습니다. 39 | 40 | E-S-R 규칙에 맞추어 인덱스를 생성 하고 Explain에서 개선된 사항을 확인 합니다. 41 | 42 | 43 | #### Index 생성 44 | 45 | 테스트를 위해 cast - year - title 순서로 인덱스를 생성 하고 테스트 합니다. 46 | 47 | 48 | 49 | 50 | 동일한 쿼리를 수행 하여 봅니다. 51 | 52 | 53 | 54 | 문서 스캔이 Index 스캔으로 변경 되고 기존에 비해 성능이 개선된 것을 확인 합니다. 55 | 56 | 첫 번째에서 IXSCAN으로 생성한 인덱스를 이용하여 12개의 문서가 검색된 것을 확인 할 수 있습니다. 이후 정렬 과정을 거친 후 데이터가 반환 되는 것을 확인 할 수 있습니다. 57 | 58 | 인덱스를 ESR 순서로 작성합니다. (cast-title-year) 59 | 동일한 쿼리를 실행 하여 플랜을 확인 합니다. 60 | 61 | 62 | 63 | Projection 항목에 title만을 출력 하도록 하고 Plan을 확인 합니다. 64 | 65 | 66 | ### Aggregation 67 | 68 | Movies 컬렉션에서 장르가 "Comedy" 인 영화 중 포함된 모든 국가를 기준으로 그룹하여 국가별 포함 개수를 "CountriesInComedy" 컬렉션에 데이터를 생성하여 줍니다. 69 | 70 | Aggregation 이 제공하는 Stage 중, Match 를 이용하여 장르가 Comedy인 것을 찾을 수 있으며, 배열로 되어 있는 항목을 개별로 전환은 unwind 를 이용합니다. 국가별로 그룹을 만들기 위해서는 group Stage를 활용 하며 결과 데이터를 컬렉션에 넣기 위해서는 out을 이용 합니다. 71 | 72 | matach 73 | Find와 유사한 형태로 사용 합니다. 74 | 75 | ```` 76 | {$match: 77 | { 78 | genres: 'Comedy', 79 | } 80 | } 81 | ```` 82 | 83 | unwind 84 | 배열을 항목을 지정하면 이를 개별 문서로 전환 하여 줍니다. 85 | 86 | ```` 87 | {$unwind: 88 | { 89 | path: '$countries', 90 | } 91 | } 92 | ```` 93 | 94 | group 95 | 지정된 필드를 기준으로 그룹하여 줍니다. SQL의 Group by 와 유사 합니다. 그룹에 따른 계산은 그룹별 카운트 한 횟수로 합니다. 96 | 97 | ```` 98 | {$group: 99 | { 100 | _id: '$countries', 101 | count: { 102 | $sum: 1, 103 | } 104 | } 105 | } 106 | ```` 107 | 108 | out 109 | 입력된 커서를 지정된 컬렉션으로 생성 하여 줍니다. 
110 | 111 | ```` 112 | { 113 | $out: 'countriesByComedy', 114 | } 115 | ```` 116 | 117 | Compass 의 Aggregation에서 Stage를 생성 하여 줍니다. 118 | 119 | match stage 생성 하기 120 | 121 | 122 | 123 | unwind stage 생성 하기 124 | 125 | 126 | 127 | group stage 생성 하기 128 | 129 | 130 | 131 | out stage 생성 하기 132 | 133 | 134 | 135 | 생성된 컬렉션을 확인 합니다. out은 컬렉션을 생성하고 데이터를 생성 하여 줌으로 다시 aggregation을 실행 하기 위해서는 생성된 컬렉션을 삭제하고 실행 해줍니다. (실행 후 작성한 aggregation을 저장하여 줍니다.) 136 | 137 | 138 | 139 | 140 | #### Aggregation Node JS 실행 하기 141 | 142 | 작성한 Aggregation 코드를 Nodejs에서 실행 하도록 개발 합니다. 143 | 개발용 코드는 자동으로 생성 하여 줌으로 이를 이용 하도록 합니다. Compass에서 개발한 aggregation코드를 오픈하여 줍니다. 144 | 메뉴중 "EXPORT TO LANGUAGE"를 클릭 합니다. 145 | 146 | 147 | 148 | 개발 언어를 Node를 선택 하여 주고 코드를 복사하여 줍니다. 149 | 150 | 151 | 152 | application 의 aggregation.js 에 복사한 내용을 붙여 주기 합니다. 153 | 컬렉션을 만들지 않고 화면을 출력 하기 위해 out stage 는 생략 하고 작성 합니다. 154 | 155 | 복사한 내용을 pipeline 으로 작성 합니다. 156 | 157 | ```` 158 | const pipeline = [ 159 | { 160 | '$match': { 161 | 'genres': 'Comedy' 162 | } 163 | }, { 164 | '$unwind': { 165 | 'path': '$countries' 166 | } 167 | }, { 168 | '$group': { 169 | '_id': '$countries', 170 | 'count': { 171 | '$sum': 1 172 | } 173 | } 174 | } 175 | ]; 176 | ```` 177 | 178 | 코드를 다음과 같이 실행 하여 줍니다. 실행 전 필요한 모듈을 설치 하여 주고 실행 하여 줍니다. 
179 | 180 | ```` 181 | application % npm install 182 | 183 | added 196 packages, and audited 197 packages in 2s 184 | 185 | 14 packages are looking for funding 186 | run `npm fund` for details 187 | 188 | found 0 vulnerabilities 189 | kyudong.kim@Kyudongui-MacBookPro application % node aggregation.js 190 | Aggregation Records : [object Object] 191 | kyudong.kim@Kyudongui-MacBookPro application % node aggregation.js 192 | ReferenceError: cursor is not defined 193 | at run (/Users/kyudong.kim/works/group_git/MongoDBAtlasTraining/03.index and aggregation/application/aggregation.js:34:7) 194 | kyudong.kim@Kyudongui-MacBookPro application % node aggregation.js 195 | Error: querySrv ENOTFOUND _mongodb._tcp.***.***.mongodb.net 196 | at QueryReqWrap.onresolve [as oncomplete] (node:internal/dns/promises:251:17) { 197 | errno: undefined, 198 | code: 'ENOTFOUND', 199 | syscall: 'querySrv', 200 | hostname: '_mongodb._tcp.***.***.mongodb.net' 201 | } 202 | application % node aggregation.js 203 | { _id: 'Portugal', count: 14 } 204 | { _id: 'Cameroon', count: 1 } 205 | { _id: 'Iraq', count: 1 } 206 | { _id: 'Italy', count: 477 } 207 | { _id: 'Romania', count: 25 } 208 | { _id: 'Germany', count: 442 } 209 | { _id: 'Iceland', count: 19 } 210 | { _id: 'Poland', count: 47 } 211 | { _id: 'Canada', count: 348 } 212 | { _id: 'Soviet Union', count: 39 } 213 | { _id: 'Brazil', count: 48 } 214 | { _id: 'UK', count: 696 } 215 | { _id: 'East Germany', count: 1 } 216 | { _id: 'Israel', count: 26 } 217 | { _id: 'Zaire', count: 1 } 218 | { _id: 'Cuba', count: 7 } 219 | { _id: 'Yugoslavia', count: 8 } 220 | { _id: 'Serbia and Montenegro', count: 5 } 221 | { _id: 'Albania', count: 1 } 222 | { _id: 'Japan', count: 171 } 223 | { _id: 'Spain', count: 197 } 224 | { _id: 'Czech Republic', count: 41 } 225 | { _id: 'Ireland', count: 73 } 226 | { _id: 'Sweden', count: 94 } 227 | { _id: 'Malta', count: 1 } 228 | { _id: 'Greece', count: 24 } 229 | { _id: 'United Arab Emirates', count: 6 } 230 | { _id: 
'Serbia', count: 10 } 231 | { _id: 'Puerto Rico', count: 3 } 232 | { _id: 'Montenegro', count: 1 } 233 | { _id: 'Palestine', count: 1 } 234 | { _id: 'Liechtenstein', count: 1 } 235 | { _id: 'West Germany', count: 47 } 236 | { _id: 'Botswana', count: 2 } 237 | { _id: 'Colombia', count: 2 } 238 | { _id: 'Mexico', count: 62 } 239 | { _id: 'Tajikistan', count: 1 } 240 | { _id: "Cète d'Ivoire", count: 1 } 241 | { _id: 'Kazakhstan', count: 1 } 242 | { _id: 'Monaco', count: 1 } 243 | { _id: 'Denmark', count: 84 } 244 | { _id: 'Russia', count: 66 } 245 | { _id: 'Turkey', count: 23 } 246 | { _id: 'Latvia', count: 8 } 247 | { _id: 'Uzbekistan', count: 2 } 248 | { _id: 'Bolivia', count: 1 } 249 | { _id: 'Panama', count: 1 } 250 | { _id: 'Papua New Guinea', count: 1 } 251 | { _id: 'Iran', count: 13 } 252 | { _id: 'New Zealand', count: 29 } 253 | { _id: 'Greenland', count: 1 } 254 | { _id: 'Netherlands', count: 76 } 255 | { _id: 'Bulgaria', count: 4 } 256 | { _id: 'Croatia', count: 10 } 257 | { _id: 'Faroe Islands', count: 1 } 258 | { _id: 'Singapore', count: 6 } 259 | { _id: 'Norway', count: 56 } 260 | { _id: 'China', count: 50 } 261 | { _id: 'Slovakia', count: 6 } 262 | { _id: 'Armenia', count: 2 } 263 | { _id: 'Luxembourg', count: 20 } 264 | { _id: 'Austria', count: 33 } 265 | { _id: 'Chile', count: 7 } 266 | { _id: 'Indonesia', count: 2 } 267 | { _id: 'Rwanda', count: 1 } 268 | { _id: 'Angola', count: 1 } 269 | { _id: 'Slovenia', count: 6 } 270 | { _id: 'Jordan', count: 4 } 271 | { _id: 'Taiwan', count: 28 } 272 | { _id: 'Tunisia', count: 3 } 273 | { _id: 'Lebanon', count: 5 } 274 | { _id: 'Republic of Macedonia', count: 4 } 275 | { _id: 'Hungary', count: 31 } 276 | { _id: 'South Korea', count: 49 } 277 | { _id: 'Belgium', count: 112 } 278 | { _id: 'Uruguay', count: 6 } 279 | { _id: 'Finland', count: 104 } 280 | { _id: 'Bosnia and Herzegovina', count: 2 } 281 | { _id: 'Saudi Arabia', count: 1 } 282 | { _id: 'North Korea', count: 1 } 283 | { _id: 'Ukraine', count: 6 } 284 | 
{ _id: 'Algeria', count: 1 } 285 | { _id: 'South Africa', count: 12 } 286 | { _id: 'India', count: 199 } 287 | { _id: 'Argentina', count: 54 } 288 | { _id: 'Egypt', count: 4 } 289 | { _id: 'Czechoslovakia', count: 21 } 290 | { _id: 'Philippines', count: 10 } 291 | { _id: 'Bhutan', count: 1 } 292 | { _id: 'Thailand', count: 20 } 293 | { _id: 'Federal Republic of Yugoslavia', count: 8 } 294 | { _id: 'Estonia', count: 8 } 295 | { _id: 'Peru', count: 1 } 296 | { _id: 'Senegal', count: 3 } 297 | { _id: 'Georgia', count: 3 } 298 | { _id: 'Australia', count: 148 } 299 | { _id: 'Malaysia', count: 4 } 300 | { _id: 'USA', count: 3843 } 301 | { _id: 'Nigeria', count: 1 } 302 | { _id: 'Lithuania', count: 2 } 303 | { _id: 'Qatar', count: 1 } 304 | { _id: 'Switzerland', count: 49 } 305 | { _id: 'France', count: 793 } 306 | { _id: 'Hong Kong', count: 117 } 307 | { _id: 'Kyrgyzstan', count: 1 } 308 | ```` 309 | 310 | ### Lookup 311 | 312 | sample_mflix.comments 와 sample_mflix.users 를 결합하여 데이터를 조회 합니다. 313 | users의 데이터 중 이름이 "Mercedes Tyler"인 사람을 찾아 그가 게시한 Comments 를 찾습니다. 314 | 315 | 해당 데이터를 검색 하면 다음과 같습니다. 316 | users 317 | ```` 318 | { 319 | "_id": { 320 | "$oid": "59b99dedcfa9a34dcd78862d" 321 | }, 322 | "name": "Mercedes Tyler", 323 | "email": "mercedes_tyler@fakegmail.com", 324 | "password": "$2b$12$ONDwIwR9NKF1Tp5GjGI12e8OFMxPELoFrk4x4Q3riJGWY6jl/UZAa" 325 | } 326 | ```` 327 | 328 | comments 의 경우 다음과 같습니다. 329 | ```` 330 | [{ 331 | "_id": { 332 | "$oid": "5a9427648b0beebeb69579e7" 333 | }, 334 | "name": "Mercedes Tyler", 335 | "email": "mercedes_tyler@fakegmail.com", 336 | "movie_id": { 337 | "$oid": "573a1390f29313caabcd4323" 338 | }, 339 | "text": "Eius veritatis vero facilis quaerat fuga temporibus. Praesentium expedita sequi repellat id. Corporis minima enim ex. Provident fugit nisi dignissimos nulla nam ipsum aliquam.", 340 | "date": { 341 | "$date": { 342 | "$numberLong": "1029646567000" 343 | } 344 | } 345 | }, 346 | ... 
347 | ] 348 | ```` 349 | 350 | Lookup으로 조인을 하여 데이터를 볼 때는 전체 데이터를 조인 하는 것 보다 Match를 이용하여 Join 할 범위를 좁힌 후에 하는 것이 필요 합니다. 351 | 352 | Aggregation을 작성하기 위해 Compass에서 sample_mflix.users를 선택 합니다. 353 | Aggregation 탭에서 먼저 match 스테이지를 작성 합니다. 354 | 355 | Match 356 | ```` 357 | {$match: 358 | { 359 | name:"Mercedes Tyler" 360 | } 361 | } 362 | ```` 363 | 364 | Lookup 스테이지를 추가하여 줍니다. 365 | Lookup 366 | ```` 367 | {$lookup: 368 | { 369 | from: "comments", 370 | localField: "name", 371 | foreignField: "name", 372 | as: "Comments" 373 | } 374 | } 375 | ```` 376 | 377 | 378 | 379 | 결과로 다음과 같이 Comments를 포함한 결과가 보여 집니다. 380 | 381 | 382 | ```` 383 | { 384 | _id: ObjectId("59b99dedcfa9a34dcd78862d"), 385 | name: 'Mercedes Tyler', 386 | email: 'mercedes_tyler@fakegmail.com', 387 | password: '$2b$12$ONDwIwR9NKF1Tp5GjGI12e8OFMxPELoFrk4x4Q3riJGWY6jl/UZAa', 388 | Comments: [ 389 | { 390 | _id: ObjectId("5a9427648b0beebeb69579e7"), 391 | name: 'Mercedes Tyler', 392 | email: 'mercedes_tyler@fakegmail.com', 393 | movie_id: ObjectId("573a1390f29313caabcd4323"), 394 | text: 'Eius veritatis vero facilis quaerat fuga temporibus. Praesentium expedita sequi repellat id. Corporis minima enim ex. Provident fugit nisi dignissimos nulla nam ipsum aliquam.', 395 | date: 2002-08-18T04:56:07.000Z 396 | }, 397 | { 398 | _id: ObjectId("5a9427648b0beebeb6958131"), 399 | name: 'Mercedes Tyler', 400 | email: 'mercedes_tyler@fakegmail.com', 401 | movie_id: ObjectId("573a1392f29313caabcdb8ac"), 402 | text: 'Dolores nulla laborum doloribus tempore harum officiis. Rerum blanditiis aperiam nemo dignissimos a magni natus. Tenetur suscipit cumque sint dignissimos. Accusantium eveniet consequuntur officia ea.', 403 | date: 2007-09-21T08:52:00.000Z 404 | }, 405 | { 406 | _id: ObjectId("5a9427648b0beebeb69582cb"), 407 | name: 'Mercedes Tyler', 408 | email: 'mercedes_tyler@fakegmail.com', 409 | movie_id: ObjectId("573a1393f29313caabcdbe7c"), 410 | text: 'Voluptatem ad enim corrupti esse consectetur. 
Explicabo voluptates quo aperiam deleniti reiciendis. Temporibus aliquid delectus recusandae commodi.', 411 | date: 2008-05-17T22:55:39.000Z 412 | }, 413 | { 414 | _id: ObjectId("5a9427648b0beebeb69582cc"), 415 | name: 'Mercedes Tyler', 416 | email: 'mercedes_tyler@fakegmail.com', 417 | movie_id: ObjectId("573a1393f29313caabcdbe7c"), 418 | text: 'Fuga nihil dolor veniam repudiandae. Rem debitis ex porro dolorem maxime laborum. Esse molestias accusamus provident unde. Sint cupiditate cumque corporis nulla explicabo fuga.', 419 | date: 2011-03-01T12:06:42.000Z 420 | }, 421 | { 422 | _id: ObjectId("5a9427648b0beebeb69588e6"), 423 | name: 'Mercedes Tyler', 424 | email: 'mercedes_tyler@fakegmail.com', 425 | movie_id: ObjectId("573a1393f29313caabcde00c"), 426 | text: 'Et quas doloribus ipsum sapiente amet enim optio. Magni odio pariatur quos. Voluptatum error ipsum nemo similique error vel.', 427 | date: 1971-05-13T02:38:19.000Z 428 | }, 429 | { 430 | _id: ObjectId("5a9427648b0beebeb69589a1"), 431 | name: 'Mercedes Tyler', 432 | email: 'mercedes_tyler@fakegmail.com', 433 | movie_id: ObjectId("573a1393f29313caabcde4a8"), 434 | text: 'Ipsam quos magnam ipsum odio aspernatur voluptas nihil nesciunt. Deserunt magni corporis aperiam. Delectus blanditiis eius molestiae modi velit illo veritatis.', 435 | date: 2015-12-10T21:26:15.000Z 436 | }, 437 | { 438 | _id: ObjectId("5a9427648b0beebeb6958aeb"), 439 | name: 'Mercedes Tyler', 440 | email: 'mercedes_tyler@fakegmail.com', 441 | movie_id: ObjectId("573a1394f29313caabcde63e"), 442 | text: 'Magnam repudiandae ipsam perspiciatis. Tenetur commodi tenetur dolorem tempora. Quas a quos laboriosam.', 443 | date: 2007-09-19T02:17:40.000Z 444 | }, 445 | ... 446 | ] 447 | } 448 | ```` 449 | 450 | 451 | 452 | ### option 453 | #### Aggregation Group 454 | 다음과 같은 과일 판매 데이터가 있을 때 일자별로 판매된 과일과 총 판매 금액을 계산 합니다. 
455 | 456 | ```` 457 | db.sales.insertMany([ 458 | { "_id" : 1, "item" : "apple", "price" : 10, "quantity" : 2, "date" : ISODate("2023-01-01T08:00:00Z") }, 459 | { "_id" : 2, "item" : "grape", "price" : 20, "quantity" : 1, "date" : ISODate("2023-02-03T09:00:00Z") }, 460 | { "_id" : 3, "item" : "melon", "price" : 5, "quantity" : 5, "date" : ISODate("2023-02-03T09:05:00Z") }, 461 | { "_id" : 4, "item" : "apple", "price" : 10, "quantity" : 10, "date" : ISODate("2023-02-15T08:00:00Z") }, 462 | { "_id" : 5, "item" : "melon", "price" : 5, "quantity" : 10, "date" : ISODate("2023-02-15T09:12:00Z") } 463 | ]) 464 | 465 | ```` 466 | 467 | 일자 데이터를 기준으로 그룹을 생성하고 accumulation 으로 addToSet, sum 을 이용합니다. 468 | 469 | ```` 470 | db.sales.aggregate( 471 | [ 472 | { 473 | $group: 474 | { 475 | _id: { day: { $dayOfYear: "$date"}, year: { $year: "$date" } }, 476 | itemsSold: { $addToSet: "$item" }, 477 | total_price: {$sum: "$price"} 478 | } 479 | } 480 | ] 481 | ) 482 | 483 | { 484 | _id: { 485 | day: 34, 486 | year: 2023 487 | }, 488 | itemsSold: [ 489 | 'grape', 490 | 'melon' 491 | ], 492 | total_price: 25 493 | } 494 | { 495 | _id: { 496 | day: 46, 497 | year: 2023 498 | }, 499 | itemsSold: [ 500 | 'melon', 501 | 'apple' 502 | ], 503 | total_price: 15 504 | } 505 | { 506 | _id: { 507 | day: 1, 508 | year: 2023 509 | }, 510 | itemsSold: [ 511 | 'apple' 512 | ], 513 | total_price: 10 514 | } 515 | ```` 516 | #### Aggregation Bucket 517 | 화가의 프로파일 정보에서 태어난 년도를 기준으로 하여 그룹을 생성 합니다. 년도는 10년을 기준으로 집계 합니다. 즉 1840 ~1850 년으로 집계 합니다. 
518 | 519 | ```` 520 | db.artists.insertMany([ 521 | { "_id" : 1, "last_name" : "Bernard", "first_name" : "Emil", "year_born" : 1868, "year_died" : 1941, "nationality" : "France" }, 522 | { "_id" : 2, "last_name" : "Rippl-Ronai", "first_name" : "Joszef", "year_born" : 1861, "year_died" : 1927, "nationality" : "Hungary" }, 523 | { "_id" : 3, "last_name" : "Ostroumova", "first_name" : "Anna", "year_born" : 1871, "year_died" : 1955, "nationality" : "Russia" }, 524 | { "_id" : 4, "last_name" : "Van Gogh", "first_name" : "Vincent", "year_born" : 1853, "year_died" : 1890, "nationality" : "Holland" }, 525 | { "_id" : 5, "last_name" : "Maurer", "first_name" : "Alfred", "year_born" : 1868, "year_died" : 1932, "nationality" : "USA" }, 526 | { "_id" : 6, "last_name" : "Munch", "first_name" : "Edvard", "year_born" : 1863, "year_died" : 1944, "nationality" : "Norway" }, 527 | { "_id" : 7, "last_name" : "Redon", "first_name" : "Odilon", "year_born" : 1840, "year_died" : 1916, "nationality" : "France" }, 528 | { "_id" : 8, "last_name" : "Diriks", "first_name" : "Edvard", "year_born" : 1855, "year_died" : 1930, "nationality" : "Norway" } 529 | ]) 530 | ```` 531 | 532 | 태어난 년도를 기준으로 집계하기 위해서 bucket을 이용하여 groupBy 항목으로 year_born을 지정하여 줍니다. 태어난 년도의 집계는 10년을 기준으로 category화 하며, 그 구간 기준은 boundaries에 작성하여 줍니다. 
533 | 534 | ```` 535 | db.artists.aggregate( [ 536 | { 537 | $bucket: { 538 | groupBy: "$year_born", // Field to group by 539 | boundaries: [ 1840, 1850, 1860, 1870, 1880 ], // Boundaries for the buckets 540 | default: "Other", // Bucket ID for documents which do not fall into a bucket 541 | output: { // Output for each bucket 542 | "count": { $sum: 1 }, 543 | "artists" : 544 | { 545 | $push: { 546 | "name": { $concat: [ "$first_name", " ", "$last_name"] }, 547 | "year_born": "$year_born" 548 | } 549 | } 550 | } 551 | } 552 | } 553 | ] ) 554 | 555 | 556 | { 557 | _id: 1840, 558 | count: 1, 559 | artists: [ 560 | { 561 | name: 'Odilon Redon', 562 | year_born: 1840 563 | } 564 | ] 565 | } 566 | { 567 | _id: 1850, 568 | count: 2, 569 | artists: [ 570 | { 571 | name: 'Vincent Van Gogh', 572 | year_born: 1853 573 | }, 574 | { 575 | name: 'Edvard Diriks', 576 | year_born: 1855 577 | } 578 | ] 579 | } 580 | { 581 | _id: 1860, 582 | count: 4, 583 | artists: [ 584 | { 585 | name: 'Emil Bernard', 586 | year_born: 1868 587 | }, 588 | { 589 | name: 'Joszef Rippl-Ronai', 590 | year_born: 1861 591 | }, 592 | { 593 | name: 'Alfred Maurer', 594 | year_born: 1868 595 | }, 596 | { 597 | name: 'Edvard Munch', 598 | year_born: 1863 599 | } 600 | ] 601 | } 602 | { 603 | _id: 1870, 604 | count: 1, 605 | artists: [ 606 | { 607 | name: 'Anna Ostroumova', 608 | year_born: 1871 609 | } 610 | ] 611 | } 612 | 613 | ```` 614 | 615 | #### Aggregation Unwind 616 | 다음과 같은 의류 정보가 있을 때 의류를 기준으로 가능한 사이즈 정보가 배열화 되어 있습니다. 각 사이즈를 구분하여 문서화를 합니다. 617 | 사이즈가 없는 의류들은 이를 포함 함니다. 618 | 619 | ```` 620 | db.clothing.insertMany([ 621 | { "_id" : 1, "item" : "Shirt", "sizes": [ "S", "M", "L"] }, 622 | { "_id" : 2, "item" : "Shorts", "sizes" : [ ] }, 623 | { "_id" : 3, "item" : "Hat", "sizes": "M" }, 624 | { "_id" : 4, "item" : "Gloves" }, 625 | { "_id" : 5, "item" : "Scarf", "sizes" : null } 626 | ]) 627 | ```` 628 | 배열로 되어 있는 값을 하나의 문서로 만들어 주기 위해 unwind를 사용합니다. 기본적으로 지정된 array (size)에 값이 없는 경우 연산에서 제외 합니다. 
이를 포함하도록 하는 옵션은 preserveNullAndEmptyArrays입니다. 629 | 630 | 631 | ```` 632 | 633 | db.clothing.aggregate( [ 634 | { $unwind: { path: "$sizes", preserveNullAndEmptyArrays: true } } 635 | ] ) 636 | 637 | { 638 | _id: 1, 639 | item: 'Shirt', 640 | sizes: 'S' 641 | } 642 | { 643 | _id: 1, 644 | item: 'Shirt', 645 | sizes: 'M' 646 | } 647 | { 648 | _id: 1, 649 | item: 'Shirt', 650 | sizes: 'L' 651 | } 652 | { 653 | _id: 2, 654 | item: 'Shorts' 655 | } 656 | { 657 | _id: 3, 658 | item: 'Hat', 659 | sizes: 'M' 660 | } 661 | { 662 | _id: 4, 663 | item: 'Gloves' 664 | } 665 | { 666 | _id: 5, 667 | item: 'Scarf', 668 | sizes: null 669 | } 670 | ```` 671 | 672 | 673 | #### 좌표 정보 검색 674 | sample_airbnb.listingsAndReviews 컬렉션에는 숙박 시설 정보를 가진 문서이며 해당 숙박시설의 지리 정보가 좌표로 입력 되어 있습니다. (address.location) 마드리드 공항을 기준으로 가장 가까운 숙박 시설을 검색 합니다. 마드리드 공항의 좌표 정보는 -3.56744, 40.49845 이며 검색하려는 숙박 시설은 Hotel 과 Apartment 입니다. 보는 데이터는 숙박 시설의 이름과 주소, 떨어진 거리, 금액으로 합니다. (name, property_type, summary, address, price) 675 | 676 | 검색은 geoNear 스테이지를 이용하여 검색 하며 전체 데이터중 보고자 하는 필드만을 제한 하기 위해 project 스테이지를 사용 합니다. 
677 | 678 | 679 | ```` 680 | db.listingsAndReviews.aggregate( [ 681 | { $geoNear: { 682 | near: { type: 'Point', coordinates: [ -3.56744, 40.49845]}, 683 | distanceField:"distance", 684 | key:"address.location", 685 | query: {property_type: {$in: ["Hotel","Apartment"]}}, 686 | spherical: true 687 | } }, 688 | { $project: {name:1, property_type:1, 689 | summary:1, address:1, 690 | price:1, distance:1} 691 | } 692 | ] ) 693 | 694 | { 695 | _id: '18426634', 696 | name: 'Private room', 697 | summary: 'Intermarche', 698 | property_type: 'Apartment', 699 | price: Decimal128("78.00"), 700 | address: { 701 | street: 'Porto, Porto, Portugal', 702 | suburb: '', 703 | government_area: 'Canedo, Vale e Vila Maior', 704 | market: 'Porto', 705 | country: 'Portugal', 706 | country_code: 'PT', 707 | location: { 708 | type: 'Point', 709 | coordinates: [ 710 | -8.4022, 711 | 41.00962 712 | ], 713 | is_location_exact: false 714 | } 715 | }, 716 | distance: 411593.1181197846 717 | } 718 | { 719 | _id: '21883829', 720 | name: 'Terraço', 721 | summary: `La maison dispose de 4 chambres et un canapé-lit, est bon pour le repos et est situé dans un quartier calme et magnifique. Il est proche de la plage d'Espinho et si vous préférez visiter une zone naturelle, nous avons les "Passadiços de Arouca", c'est une zone très belle et relaxante. 
Ici, dans ce village, nous avons aussi un excellent restaurant qu'ils dépensent des repas économiques et très bons.`, 722 | property_type: 'Apartment', 723 | price: Decimal128("40.00"), 724 | address: { 725 | street: 'Aveiro, Aveiro, Portugal', 726 | suburb: '', 727 | government_area: 'Lobão, Gião, Louredo e Guisande', 728 | market: 'Porto', 729 | country: 'Portugal', 730 | country_code: 'PT', 731 | location: { 732 | type: 'Point', 733 | coordinates: [ 734 | -8.45777, 735 | 40.98082 736 | ], 737 | is_location_exact: true 738 | } 739 | }, 740 | distance: 415895.69466630125 741 | } 742 | { 743 | _id: '23391765', 744 | name: 'Cozy Flat, São João da Madeira', 745 | summary: 'Um apartamento com quarto de cama de casal, wc’s privativos, sala e cozinha, equipado com tudo que precisa. Uma excelente opção para amantes de caminhadas, cultura e lazer. Se o seu motivo de visita for meramente profissional vai sentir-se em casa. Situa-se junto dos serviços e comércios necessários e a área é servida por transportes públicos. Para além de São João da Madeira, poderá visitar Santa Maria da Feira e Estarreja em poucos minutos. 
Localiza-se a cerca de 45 km do Porto e Aveiro.', 746 | property_type: 'Apartment', 747 | price: Decimal128("45.00"), 748 | address: { 749 | street: 'São João da Madeira, Aveiro, Portugal', 750 | suburb: '', 751 | government_area: 'São João da Madeira', 752 | market: 'Porto', 753 | country: 'Portugal', 754 | country_code: 'PT', 755 | location: { 756 | type: 'Point', 757 | coordinates: [ 758 | -8.48714, 759 | 40.895 760 | ], 761 | is_location_exact: true 762 | } 763 | }, 764 | distance: 417500.0627241467 765 | } 766 | { 767 | _id: '30341193', 768 | name: 'Recanto agua', 769 | summary: 'No meio da cidade, da para andar sempre a pé.', 770 | property_type: 'Apartment', 771 | price: Decimal128("25.00"), 772 | address: { 773 | street: 'São João da Madeira, Aveiro, Portugal', 774 | suburb: '', 775 | government_area: 'São João da Madeira', 776 | market: 'Porto', 777 | country: 'Portugal', 778 | country_code: 'PT', 779 | location: { 780 | type: 'Point', 781 | coordinates: [ 782 | -8.49682, 783 | 40.89102 784 | ], 785 | is_location_exact: true 786 | } 787 | }, 788 | distance: 418278.04115931864 789 | } 790 | ```` -------------------------------------------------------------------------------- /03.index and aggregation/application/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory 
(https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | 106 | .DS_Store 107 | -------------------------------------------------------------------------------- /03.index and aggregation/application/aggregation.js: -------------------------------------------------------------------------------- 1 | const { MongoClient, ServerApiVersion } = require('mongodb'); 2 | const uri = "mongodb+srv://:@***.***.mongodb.net/?retryWrites=true&w=majority"; 3 | 4 | const client = new MongoClient(uri); 5 | 6 | 7 | async function run() { 8 | try { 9 | const database = client.db("sample_mflix"); 10 | const userCollection = 
database.collection("movies"); 11 | 12 | const pipeline = [ 13 | { 14 | '$match': { 15 | 'genres': 'Comedy' 16 | } 17 | }, { 18 | '$unwind': { 19 | 'path': '$countries' 20 | } 21 | }, { 22 | '$group': { 23 | '_id': '$countries', 24 | 'count': { 25 | '$sum': 1 26 | } 27 | } 28 | } 29 | ]; 30 | 31 | 32 | const result = await userCollection.aggregate(pipeline); 33 | 34 | await result.forEach(console.dir); 35 | 36 | } finally { 37 | await client.close(); 38 | } 39 | } 40 | 41 | 42 | run().catch(console.dir); -------------------------------------------------------------------------------- /03.index and aggregation/application/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node2", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "nodemon index" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "cors": "^2.8.5", 13 | "dotenv": "^10.0.0", 14 | "express": "^4.17.1", 15 | "mongodb": "^4.1.2", 16 | "morgan": "^1.10.0", 17 | "nunjucks": "^3.2.3" 18 | }, 19 | "devDependencies": { 20 | "nodemon": "^2.0.15" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /03.index and aggregation/images/image01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image01.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image02.png -------------------------------------------------------------------------------- /03.index and 
aggregation/images/image03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image03.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image04.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image04.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image05.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image05.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image06.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image06.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image07.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image07.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image08.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image08.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image09.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image09.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image10.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image11.png -------------------------------------------------------------------------------- /03.index and aggregation/images/image12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/03.index and aggregation/images/image12.png -------------------------------------------------------------------------------- /04.atlas-search/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/.DS_Store 
-------------------------------------------------------------------------------- /04.atlas-search/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (https://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # Typescript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | # next.js build output 61 | .next 62 | 63 | config.js 64 | data/dump 65 | package-lock.json 66 | .vscode/ -------------------------------------------------------------------------------- /04.atlas-search/README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # MongoDB Atlas Search Lab 4 | 5 | This is a lab to help you gain experience using MongoDB Atlas Search. The original repo is [here](https://github.com/10gen/search_training_lab), but the repo is private. 
6 | 7 | In this lab, you will start with an unfinished application built to search through forum posts which are real-world data pulled from MongoDB's community forums. 8 | Your goal in each lab exercise is to complete the Atlas Search query pipeline. Outside of writing the pipeline, you will not need to edit any code. 9 | If you are not familiar with "nodejs" then go to the lab "Search in Compass". That lab performs the search with MongoDB Compass rather than NodeJS. However, you still need to edit the search query. 10 | 11 | ## Schedule 12 | 13 | - [Setup](#prep-setup) 14 | - [Lab 1: Fuzzy & Synonyms](#lab-1-fuzzy--synonyms) 15 | - [Lab 2: Autocomplete](#lab-2-autocomplete) 16 | - [Lab 3: Compound query](#lab-3-compound-query) 17 | - [Lab 4: Search for full title - exact match](#lab-4-exact-match) 18 | - [Lab 5: Distance Search - `near` operator](#lab-5-near-operator) 19 | - [Lab 6: Advanced Search - `queryString` operator](#lab-6-querystring-operator) 20 | - [Lab 7: Facets - bucketing search results](#lab-7-facets) 21 | - [Lab 8: One indexing - Mix&Match](#lab-8-one-indexing) 22 | - [Search in Compass](#search-in-compass) 23 | 24 | 25 | 26 | ## Prep: Setup 27 | 28 | **Requisites**: 29 | 30 | - Atlas cluster M0 or higher 31 | - [mongorestore](https://www.mongodb.com/docs/database-tools/mongorestore/)([install](https://www.mongodb.com/try/download/database-tools)) 32 | - nodeJS 33 | - npm 34 | 35 | To get started, spin up an [Atlas M0 free tier](https://cloud.mongodb.com) using a cloud provider of your choice. 36 | While the cluster is being created, ensure that **nodeJS** and **npm** package manager are in place on your laptop. 37 | 38 | From within `04.atlas-search/`, 39 | 40 | 1. run `npm install` 41 | 1. Copy `sample_config.js` and name it `config.js`. Set `atlasURI` 42 | 1. run `npm test`. All the unit tests should fail at the moment. 43 | 1. move to `04.atlas-search/data/` directory 44 | 45 | From within `04.atlas-search/data/`, 46 | 47 | 5. 
run `tar -zxvf dump.tar.gz` 48 | 6. run `mongorestore ` 49 | 7. confirm `forum_db` is created and has 3 collections 50 | > posts 51 | > replies 52 | > synonyms 53 | 54 | `````bash 55 | data % mongorestore mongodb+srv://admin:*****@<>.mongodb.net/ 56 | 2023-03-21T11:54:04.822+0900 WARNING: On some systems, a password provided directly in a connection string or using --uri may be visible to system status programs such as `ps` that may be invoked by other users. Consider omitting the password to provide it via stdin, or using the --config option to specify a configuration file with the password. 57 | 2023-03-21T11:54:05.601+0900 using default 'dump' directory 58 | 2023-03-21T11:54:05.601+0900 preparing collections to restore from 59 | 2023-03-21T11:54:05.612+0900 reading metadata for forum_db.posts from dump/forum_db/posts.metadata.json 60 | 2023-03-21T11:54:05.612+0900 reading metadata for forum_db.replies from dump/forum_db/replies.metadata.json 61 | 2023-03-21T11:54:05.612+0900 reading metadata for forum_db.synonyms from dump/forum_db/synonyms.metadata.json 62 | 2023-03-21T11:54:05.732+0900 restoring forum_db.posts from dump/forum_db/posts.bson 63 | 2023-03-21T11:54:05.744+0900 restoring forum_db.replies from dump/forum_db/replies.bson 64 | 2023-03-21T11:54:05.764+0900 restoring forum_db.synonyms from dump/forum_db/synonyms.bson 65 | 2023-03-21T11:54:05.780+0900 finished restoring forum_db.synonyms (1 document, 0 failures) 66 | 2023-03-21T11:54:06.734+0900 finished restoring forum_db.posts (5000 documents, 0 failures) 67 | 2023-03-21T11:54:08.585+0900 [#################.......] 
forum_db.replies 34.2MB/46.2MB (74.0%) 68 | 2023-03-21T11:54:09.656+0900 [########################] forum_db.replies 46.2MB/46.2MB (100.0%) 69 | 2023-03-21T11:54:09.656+0900 finished restoring forum_db.replies (34654 documents, 0 failures) 70 | 2023-03-21T11:54:09.656+0900 restoring users from dump/admin/system.users.bson 71 | 2023-03-21T11:54:09.703+0900 restoring roles from dump/admin/system.roles.bson 72 | 2023-03-21T11:54:09.774+0900 Failed: restore error: error running merge command: (Unauthorized) not authorized on admin to execute command { _mergeAuthzCollections: 1, tempUsersCollection: "admin.tempusers", tempRolesCollection: "admin.temproles", drop: false, db: "", writeConcern: { w: "majority" }, lsid: { id: UUID("bd28e6f6-76ee-4439-b76c-bff2785e2f05") }, $clusterTime: { clusterTime: Timestamp(1679367249, 5662), signature: { hash: BinData(0, 310FA0ECBA937791B82DCB7125EE2482C7964A7A), keyId: 7165675062429745153 } }, $db: "admin", $readPreference: { mode: "primary" } } 73 | 2023-03-21T11:54:09.774+0900 39655 document(s) restored successfully. 0 document(s) failed to restore. 74 | ````` 75 | 76 | Back to `04.atlas-search/`, 77 | 78 | 8. run `npm start` 79 | 9. open `http://localhost:3000` from your browser 80 | 81 | Right now if you type query into the application search box and hit the "Run Search" button, nothing happens. 82 | Let's fix that. 83 | 84 | > You can check out the git branch `search-sol` to peek at the solutions for lab 1~7. 85 | 86 | ## Lab 1: Fuzzy & Synonyms 87 | 88 | Create the first search index on `forum_db.posts`. 
89 | 90 | - index name: `language_index` 91 | - analyzer: `lucene.english` for both index and search 92 | - dynamic mapping: `true` 93 | 94 | 95 | 96 | ### Lab 1-a: Fuzzy search 97 | 98 | 1st fuzzy search query: open `labs/lab1.js` and complete the pipeline `$search` stage 99 | 100 | - use `text` operator 101 | - `term` is passed in for search term to use 102 | - enable fuzzy search 103 | 104 | > Now, unit test lab1 should pass 105 | > In the app, you will get results containing "replica set" when typing in typos like "reeplica sat" 106 | 107 | 108 | 109 | 110 | ### Lab 1-b: Synonym search 111 | 112 | Update the index to enable synonyms 113 | 114 | - synonym mapping name: `my-mapping` 115 | - use `synonyms` for source collection 116 | - analyzer: `lucene.english` 117 | 118 | Following is configuration of Synonyms Mappings on the search index. 119 | 120 | 121 | Update the query to support synonyms instead of fuzzy. 122 | 123 | > Unit test lab1 should pass 124 | 125 | `forum_db.synonyms` has an `equivalent` mapping for `["node", "server", "instance", "crustacean"]`. 126 | The first 3 make sense but why `crustacean`? 127 | It's because each and every document is likely to contain one of 3 so it's not easy to tell that our synonym really works. So the most unlikely synonym is added. 128 | You must be able to find documents with node, server, or instance if looking for `crustacean`. 129 | 130 | 131 | 132 | 133 | ## Lab 2: Autocomplete 134 | 135 | Modify the index and support the simplest form of autocomplete on `post_title` field. 136 | Do not change any default values like `Max/Min Grams`, `Tokenization`, and `Fold Diacritics`. 137 | 138 | 139 | 140 | Complete `labs/lab2.js` pipeline to support autocomplete on `post_title`. 141 | 142 | Now unit test lab2 must pass. 143 | And the app will show search results as you type according to title. 144 | 145 | 146 | ## Lab 3: Compound query 147 | 148 | Now time to implement advanced logic. 
149 | For example, what if I want to do a text search but filter the result down to just posts by MongoDB employees? 150 | Add `$match` stage after `$search` can do but it's inefficient. 151 | You're better off letting Atlas Search handle filter. 152 | 153 | To do that, we can use `compound` operator. 154 | 155 | In the app UI, there's a checkbox, `Show Only MongoDB Employee Responses`. 156 | Let's make this work! 157 | 158 | Leverage `mongodb_staff` field but you don't need to add it to the index because your index is dynamically mapped. 159 | 160 | Use `compound` operator and complete `labs/lab3.js` pipeline to search by `post_text` and filter by `mongodb_staff`. 161 | 162 | - `must`: `post_text` 163 | - `filter`: `mongodb_staff` 164 | 165 | 166 | 167 | 168 | 169 | Now unit test lab3 should pass. 170 | 171 | ## Lab 4: Exact match 172 | 173 | **Use case**: 174 | If the user wraps a query in quotes, we want to find exactly matching phrase as a whole in the title. 175 | 176 | Why not do with MongoDB native index using `$regex` on MQL `find` or aggregation `$match` stage? 177 | If not the term starts in the beginning of the field, it takes long to slide the term through the field. Atlas Search can do the job faster and better. 178 | 179 | To support this, `keyword` analyzer should be used so create another index, 180 | 181 | - index name: `keyword_index` 182 | - analyzer: `lucene.keyword` 183 | - field: `post_title` 184 | 185 | 186 | 187 | Complete `labs/lab4.js` by using the new index. 188 | 189 | Check if it works by entering `"How to add a modifier to a nested document with mongodb"` in the app. 190 | Don't forget to type in quotes too! 191 | 192 | 193 | 194 | 195 | Now unit test lab4 should pass. 196 | 197 | ## Lab 5: `near` operator 198 | 199 | While range query is supported by DB itself, Atlas Search allows you to find records near a given value like ISODate, number, or GeoJSON point fields. 
200 | Moreover unlike range, `near` sorts out the result as per the distance from `origin`. It's especially powerful when with GeoJSON point. 201 | - Use the "language_index" created lab1. (That is dynamic mapping) 202 | 203 | In the `labs/lab5.js`, write a query to find posts created near a given date using `near` operator. 204 | 205 | > **Hint**: 206 | > You need `compound` operator to run 2 operators 207 | > 208 | > - `text` for search term on `post_text` 209 | > - `near` on `post_date`, use 1(ms) for `pivot` value 210 | 211 | Run the app and check by entering a date into the date field and typing a term in the search field. Then hit `search` button. 212 | 213 | Let's search the following record. 214 | 215 | 216 | 217 | Created on "2020-01-29" and it has "disagree" in the post_text. 218 | 219 | 220 | 221 | 222 | Now unit test lab5 should pass. 223 | 224 | ## Lab 6: `queryString` operator 225 | 226 | So far, we've run a simple search. However, one of the common use cases is **Advanced Search** where users can combine searches on multiple fields using logical operators, `AND`, `OR`, `NOT`, or `()`. 227 | 228 | > eg. `TERM1 OR (TERM2 AND TERM3))` 229 | 230 | This can be accomplished in Atlas Search by using [`queryString`](https://www.mongodb.com/docs/atlas/atlas-search/queryString/) operator. 231 | 232 | The `queryString` operator is not supported by language analyzer. So you need to create the 3rd index. 233 | 234 | - index name: `qs_index` 235 | - analyzer: `lucene.standard` 236 | - dynamic: `true` 237 | 238 | 239 | 240 | 241 | Complete `labs/lab6.js` and use the new `qs_index` and `queryString` operator with `defaultPath` to `post_text`. 242 | 243 | 244 | 245 | 246 | Now unit test lab6 should pass. 247 | 248 | ## Lab 7: Facets 249 | 250 | Facets allow users to group and aggregate data based on different properties(fields). 251 | Popular use case is a part of filter as in common e-Commerce product search. 
252 | 253 | All facet datatypes (`stringFacet`, `numberFacet`, `dateFacet`) are not covered by dynamic mapping, so the index must explicitly declare facet fields even with `dynamic: true`. 254 | 255 | Update `qs_index` and add a `StringFacet` to the `user.full_name` field and a `NumberFacet` to the `reply_count` field. 256 | 257 | Add the Field Mapping in Edit mode. 258 | 259 | 260 | 261 | The final index looks like this. 262 | 263 | 264 | 265 | Complete the `labs/lab7.js` pipeline to use the `$searchMeta` stage. 266 | 267 | - First, filter to valid documents by `reply_count` 268 | - define the numberFacet 269 | - facet name: `reply_count_facet` 270 | - path: `reply_count` 271 | - boundaries: 0, 5, 10, 15, 20 272 | - default: `"More than 20"` 273 | - define the stringFacet 274 | - facet name: `username_facet` 275 | - path: `user.full_name` 276 | - number of buckets: 25 277 | 278 | When you reload the search app, you can see Facets on the left menu. 279 | 280 | 281 | 282 | 283 | **note**: Facet names other than those set above will not pass the unit test lab7. 284 | 285 | Now unit test lab7 should pass. 286 | 287 | ## Lab 8: One unified index 288 | 289 | So far, you've created 3 indexes. If your cluster is an M0 free tier, it has used up the max number of indexes. 290 | You can combine the 3 indexes into one unified version. 291 | 292 | > You can check out the branch, `search-sol8`, for updated queries that use one index 293 | 294 | 295 | ## Search in Compass 296 | Atlas Console에서 인덱스를 구성하고 검색을 합니다. 297 | 298 | ### Prerequisite 299 | 생성된 데이터 베이스 클러스터에 초기 샘플 데이터를 적재하여 Hands on을 진행 합니다. 300 | 301 | 302 | 303 | Database 메뉴를 클릭 하면 생성된 데이터 베이스 클러스터를 볼 수 있습니다. 최초에는 데이터가 없으므로 클러스터 메뉴 버튼 "..."을 클릭 하면 추가 메뉴 중 Load Sample Dataset 을 선택 합니다. 304 | 생성이 완료된 후 Browse Collections를 클릭하면 데이터를 볼 수 있습니다. 305 | 생성된 데이터 베이스는 sample_airbnb외 8개의 데이터베이스가 생성 되고 최소 1개 이상의 컬렉션(테이블)이 생성되게 됩니다. 306 | 307 | 308 | ### Search Index 309 | Sample_mflix 데이터베이스내에 movies 컬렉션에 검색 인덱스를 생성 합니다. 
310 | Atlas 콘솔에서 Sample_mflix 에서 movies를 선택 하고 데이터 화면에서 Search Indexes를 선택 합니다. 311 | 312 | 313 | Create Search index를 클릭 합니다. 314 | 315 | 316 | 인덱스 생성 방법은 UI를 이용해서 설정을 이용하여 생성하는 방법과 Json 메시지를 입력하여 만드는 방법이 있습니다. Json으로 작성하는 경우는 custom analyzer를 이용하는 경우에 사용 할 수 있습니다. 단순 검색을 위한 것이므로 Visual Editor를 선택 합니다. 317 | 318 | 319 | 인덱스 이름을 지정하고 인덱스 생성 대상(데이터베이스, 컬렉션)을 선택 합니다. Sample_mflix.movies를 선택 하여 줍니다. 인덱스 이름은 searchidx로 하여 줍니다. 320 | 321 | 322 | 323 | 기본 인덱스를 이용할 것이므로 dynamic mapping 이 on 된 상태 그대로 인덱스를 생성하여 줍니다. 324 | 325 | 326 | 327 | 완료를 하게 되면 인덱스 생성이 진행됩니다. 데이터 양에 따라 2-3분 후에 인덱스가 생성 완료 됩니다. 328 | 329 | 330 | 331 | ### Keyword Search 332 | 영화 제목을 기준으로 검색을 진행 합니다. title 항목에서 "eclipse"를 검색 하여 봅니다. 333 | 334 | MongoDB Compass를 실행 하고 Sample_mflix.movies를 선택 하고 데이터 화면에서 Aggregation을 선택 후 Add stage 버튼을 클릭 하고 search를 생성 하여 줍니다. 335 | 336 | 337 | 338 | 검색용 Query는 title 항목에서 eclipse 를 검색 하는 것으로 전체 Query는 다음과 같습니다. (Compass Aggregation 의 Stage에 넣을 때에는 Stage Search가 선택 되어 있으므로 $search 의 Value 항목만을 입력 하여 줍니다.) 339 | ```` 340 | { 341 | $search: { 342 | index: 'searchidx', 343 | text: { 344 | query: 'eclipse', 345 | path: 'title' 346 | } 347 | } 348 | } 349 | ```` 350 | 상단에 Run 버튼을 클릭 하면 검색에 대한 결과를 볼 수 있습니다. 351 | 352 | 353 | 354 | 355 | ### Title, fullplot 에서 검색 356 | 검색 대상을 늘려서 검색을 진행 합니다. 제목과 줄거리 필드를 대상으로 특정 단어를 검색 합니다. 검색 대상은 "crime" 으로 제목과 줄거리에 해당 단어가 들어간 것을 검색 합니다. 357 | 358 | MongoDB Compass에서 Aggregation을 선택 하고 Add Stage 를 클릭하고 Query를 작성 합니다. (Compass Aggregation 의 Stage에 넣을 때에는 Stage Search가 선택 되어 있으므로 $search 의 Value 항목만을 입력 하여 줍니다.) 359 | ```` 360 | { 361 | $search: { 362 | index: 'searchidx', 363 | text: { 364 | query: 'crime', 365 | path: ['title','fullplot'] 366 | } 367 | } 368 | } 369 | ```` 370 | 371 | 372 | 검색 결과를 확인 합니다. (제목 및 줄거리에 crime 이 포함된 것이며 검색된 횟수가 많은 것이 score가 높게 나오게 됩니다.) 373 | 374 | 375 | 376 | 377 | ### Fuzzy검색 (오타) 378 | 검색 했던 단어 Eclipse로 검색을 진행 하며 오타를 포함하여 검색이 되도록 합니다. "eclopse"로 하여 검색을 진행을 하더라도 "eclipse"와 동일한 검색이 나오는 것을 확인 합니다. 
379 | 380 | MongoDB Compass에서 Aggregation을 선택 하고 Add Stage를 클릭하고 Query를 작성 합니다. (Compass Aggregation 의 Stage에 넣을 때에는 Stage Search가 선택 되어 있음으로 $search 의 Value 항목만을 입력 하여 줍니다.) 381 | ```` 382 | { 383 | $search:{ 384 | index: 'searchidx', 385 | text: { 386 | query: 'eclopse', 387 | path: 'title', 388 | fuzzy: { 389 | maxEdits: 1, 390 | maxExpansions: 100 391 | } 392 | } 393 | } 394 | } 395 | ```` 396 | 397 | 398 | 검색 결과를 확인 합니다. 399 | 400 | 401 | 402 | 403 | ### Highlight 404 | 검색한 단어가 포함된 부분을 강조 하기 위해 매치된 부분을 표기 할 수 있도록 검색 결과를 가져 옴니다. 검색어 "eclipse"로 제목과 줄거리에서 검색 합니다. 405 | 406 | MongoDB Compass에서 Aggregation을 선택 하고 Add Stage를 클릭하고 Query를 작성 합니다. (Compass Aggregation 의 Stage에 넣을 때에는 Stage Search가 선택 되어 있음으로 $search 의 Value 항목만을 입력 하여 줍니다.) 407 | ```` 408 | { 409 | $search : 410 | { 411 | "index":"searchidx", 412 | "text": { 413 | "query": "eclipse", 414 | "path": ["title","fullplot"], 415 | }, 416 | "highlight": { 417 | "path": ["title","fullplot"] 418 | } 419 | } 420 | } 421 | ```` 422 | 423 | 추가로 데이터를 확인 하기 위해 보여질 데이터 항목을 Project 를 이용하여 조정 합니다. 전체 데이터 중 제목, 줄거리, 하일라이트 항목과 점수 (score) 만 나오도록 조정 하여 줍니다. 424 | Add Stage를 하여 Stage로 project를 선택 하고 다음 Query를 입력 하여 줍니다. (Compass Aggregation 의 Stage에 넣을 때에는 Stage project가 선택 되어 있음으로 $project 의 Value 항목만을 입력 하여 줍니다.) 425 | 426 | ```` 427 | {$project: 428 | { 429 | "_id" : 0, 430 | "fullplot" : 1, 431 | "title" : 1, 432 | "highlights": {"$meta": "searchHighlights"}, 433 | "score": { 434 | "$meta": "searchScore" 435 | } 436 | } 437 | } 438 | ```` 439 | 440 | 441 | 442 | 작성한 Query를 실행하여 주면 다음과 같은 결과를 확인 할 수 있습니다. 443 | 444 | 445 | 446 | 검색 결과에서 해당 검색 단어가 나온 부분이 type 이 hit 로 표기 되어 집니다. (Texts 의 Value 를 연결하면 해당 내용이 며 hit된 부분이 스트링을 연결 할 때 강조 하도록 UI를 작성 하여 줍니다.) 
-------------------------------------------------------------------------------- /04.atlas-search/app.js: -------------------------------------------------------------------------------- 1 | var express = require('express'); 2 | var path = require('path'); 3 | var cookieParser = require('cookie-parser'); 4 | var logger = require('morgan'); 5 | 6 | var indexRouter = require('./routes/index'); 7 | var usersRouter = require('./routes/users'); 8 | 9 | var app = express(); 10 | 11 | app.use(logger('dev')); 12 | app.use(express.json()); 13 | app.use(express.urlencoded({ extended: false })); 14 | app.use(cookieParser()); 15 | app.use(express.static(path.join(__dirname, 'public'))); 16 | 17 | app.use('/', indexRouter); 18 | app.use('/users', usersRouter); 19 | 20 | module.exports = app; 21 | -------------------------------------------------------------------------------- /04.atlas-search/bin/www: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /** 4 | * Module dependencies. 5 | */ 6 | 7 | var app = require('../app'); 8 | var debug = require('debug')('search-training-lab:server'); 9 | var http = require('http'); 10 | 11 | /** 12 | * Get port from environment and store in Express. 13 | */ 14 | 15 | var port = normalizePort(process.env.PORT || '3000'); 16 | app.set('port', port); 17 | 18 | /** 19 | * Create HTTP server. 20 | */ 21 | 22 | var server = http.createServer(app); 23 | 24 | /** 25 | * Listen on provided port, on all network interfaces. 26 | */ 27 | 28 | server.listen(port); 29 | server.on('error', onError); 30 | server.on('listening', onListening); 31 | 32 | /** 33 | * Normalize a port into a number, string, or false. 
34 | */ 35 | 36 | function normalizePort(val) { 37 | var port = parseInt(val, 10); 38 | 39 | if (isNaN(port)) { 40 | // named pipe 41 | return val; 42 | } 43 | 44 | if (port >= 0) { 45 | // port number 46 | return port; 47 | } 48 | 49 | return false; 50 | } 51 | 52 | /** 53 | * Event listener for HTTP server "error" event. 54 | */ 55 | 56 | function onError(error) { 57 | if (error.syscall !== 'listen') { 58 | throw error; 59 | } 60 | 61 | var bind = typeof port === 'string' 62 | ? 'Pipe ' + port 63 | : 'Port ' + port; 64 | 65 | // handle specific listen errors with friendly messages 66 | switch (error.code) { 67 | case 'EACCES': 68 | console.error(bind + ' requires elevated privileges'); 69 | process.exit(1); 70 | break; 71 | case 'EADDRINUSE': 72 | console.error(bind + ' is already in use'); 73 | process.exit(1); 74 | break; 75 | default: 76 | throw error; 77 | } 78 | } 79 | 80 | /** 81 | * Event listener for HTTP server "listening" event. 82 | */ 83 | 84 | function onListening() { 85 | var addr = server.address(); 86 | var bind = typeof addr === 'string' 87 | ? 
'pipe ' + addr 88 | : 'port ' + addr.port; 89 | debug('Listening on ' + bind); 90 | } 91 | -------------------------------------------------------------------------------- /04.atlas-search/data/dump.tar.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/data/dump.tar.gz -------------------------------------------------------------------------------- /04.atlas-search/images/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/.DS_Store -------------------------------------------------------------------------------- /04.atlas-search/images/image01.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image01.png -------------------------------------------------------------------------------- /04.atlas-search/images/image02.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image02.png -------------------------------------------------------------------------------- /04.atlas-search/images/image03.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image03.png -------------------------------------------------------------------------------- /04.atlas-search/images/image04.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image04.png -------------------------------------------------------------------------------- /04.atlas-search/images/image05.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image05.png -------------------------------------------------------------------------------- /04.atlas-search/images/image06.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image06.png -------------------------------------------------------------------------------- /04.atlas-search/images/image07.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image07.png -------------------------------------------------------------------------------- /04.atlas-search/images/image08.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image08.png -------------------------------------------------------------------------------- /04.atlas-search/images/image09.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image09.png -------------------------------------------------------------------------------- /04.atlas-search/images/image10.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image10.png -------------------------------------------------------------------------------- /04.atlas-search/images/image11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image11.png -------------------------------------------------------------------------------- /04.atlas-search/images/image12.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image12.png -------------------------------------------------------------------------------- /04.atlas-search/images/image13.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image13.png -------------------------------------------------------------------------------- /04.atlas-search/images/image14.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image14.png -------------------------------------------------------------------------------- /04.atlas-search/images/image15.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image15.png 
-------------------------------------------------------------------------------- /04.atlas-search/images/image16.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image16.png -------------------------------------------------------------------------------- /04.atlas-search/images/image20.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image20.png -------------------------------------------------------------------------------- /04.atlas-search/images/image21.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image21.png -------------------------------------------------------------------------------- /04.atlas-search/images/image22.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image22.png -------------------------------------------------------------------------------- /04.atlas-search/images/image23.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image23.png -------------------------------------------------------------------------------- /04.atlas-search/images/image24.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image24.png -------------------------------------------------------------------------------- /04.atlas-search/images/image25.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image25.png -------------------------------------------------------------------------------- /04.atlas-search/images/image26.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image26.png -------------------------------------------------------------------------------- /04.atlas-search/images/image27.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image27.png -------------------------------------------------------------------------------- /04.atlas-search/images/image28.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image28.png -------------------------------------------------------------------------------- /04.atlas-search/images/image29.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image29.png -------------------------------------------------------------------------------- /04.atlas-search/images/image30.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image30.png -------------------------------------------------------------------------------- /04.atlas-search/images/image31.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image31.png -------------------------------------------------------------------------------- /04.atlas-search/images/image32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image32.png -------------------------------------------------------------------------------- /04.atlas-search/images/image33.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image33.png -------------------------------------------------------------------------------- /04.atlas-search/images/image34.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/images/image34.png -------------------------------------------------------------------------------- /04.atlas-search/labs/lab1.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require("mongodb"); 2 | const config = require("../config"); 3 | 4 | module.exports = { 5 | basicSearch: async function (term) { 6 | mongoClient = new MongoClient(config.atlasURI); 
7 | await mongoClient.connect(); 8 | let db = mongoClient.db("forum_db"); 9 | let posts_collection = db.collection("posts"); 10 | 11 | pipeline = [ 12 | // TODO: query MongoDB using Atlas Search to allow searching a term using the English analyzer 13 | // the user's search input is provided in the variable "term" 14 | // limit results to 20 records 15 | ]; 16 | 17 | // return results as an array of json documents 18 | result = { query: pipeline }; 19 | result["data"] = await posts_collection.aggregate(pipeline).toArray(); 20 | await mongoClient.close(); 21 | return result; 22 | }, 23 | }; 24 | -------------------------------------------------------------------------------- /04.atlas-search/labs/lab2.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require("mongodb"); 2 | const config = require("../config"); 3 | mongoClient = new MongoClient(config.atlasURI); 4 | module.exports = { 5 | autocomplete: async function (term) { 6 | await mongoClient.connect(); 7 | let db = mongoClient.db("forum_db"); 8 | let posts_collection = db.collection("posts"); 9 | 10 | pipeline = [ 11 | { 12 | $search: { 13 | //TODO - query the data using the autocomplete index. Do not use fuzzy matching, stick with all the autocomplete defaults. 
14 | // the user's search input is provided in the variable "term" 15 | }, 16 | }, 17 | { 18 | $limit: 5, 19 | }, 20 | { 21 | $project: { 22 | post_title: 1, 23 | }, 24 | }, 25 | ]; 26 | 27 | // return results as an array of json documents 28 | result = { query: pipeline }; 29 | result["data"] = await posts_collection.aggregate(pipeline).toArray(); 30 | return result; 31 | }, 32 | closeLab2Client: async function () { 33 | return await mongoClient.close(); 34 | }, 35 | }; 36 | -------------------------------------------------------------------------------- /04.atlas-search/labs/lab3.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require("mongodb"); 2 | const config = require("../config"); 3 | 4 | module.exports = { 5 | compoundSearch: async function (term) { 6 | mongoClient = new MongoClient(config.atlasURI) 7 | await mongoClient.connect() 8 | let db = mongoClient.db("forum_db") 9 | let posts_collection = db.collection("posts") 10 | 11 | pipeline = [ 12 | // TODO: write a compound query to search for whatever search term the user has entered and filter responses to just MongoDB staff 13 | // the user's search input is provided in the variable "term" 14 | // limit results to 20 records. 
15 | 16 | ] 17 | 18 | // return results as an array of json documents 19 | result = { "query": pipeline } 20 | result["data"] = await posts_collection.aggregate(pipeline).toArray() 21 | await mongoClient.close() 22 | return result 23 | } 24 | } -------------------------------------------------------------------------------- /04.atlas-search/labs/lab4.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require("mongodb"); 2 | const config = require("../config"); 3 | 4 | module.exports = { 5 | keywordSearch : async function(term){ 6 | mongoClient = new MongoClient(config.atlasURI) 7 | await mongoClient.connect() 8 | let db = mongoClient.db("forum_db") 9 | let posts_collection = db.collection("posts") 10 | 11 | pipeline = [ 12 | // TODO: write a query that uses an index with the keyword analyzer to find a specific term or phrase 13 | // the user's search input is provided in the variable "term". Limit results to 20 records. 14 | 15 | ] 16 | 17 | // return results as an array of json documents 18 | result = {"query":pipeline} 19 | result["data"] = await posts_collection.aggregate(pipeline).toArray() 20 | await mongoClient.close() 21 | return result 22 | } 23 | } -------------------------------------------------------------------------------- /04.atlas-search/labs/lab5.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require("mongodb"); 2 | const config = require("../config"); 3 | const { post } = require("../routes"); 4 | 5 | module.exports = { 6 | nearSearch : async function(term, post_date){ 7 | mongoClient = new MongoClient(config.atlasURI) 8 | await mongoClient.connect() 9 | let db = mongoClient.db("forum_db") 10 | let posts_collection = db.collection("posts") 11 | pipeline = [ 12 | // TODO: write a query to find all posts originally written near the user-specified date 13 | // the user's search input is provided in the variable "term" and 
the date variable is called "post_date" 14 | // limit results to 20 records. 15 | 16 | ] 17 | 18 | // return results as an array of json documents 19 | result = {"query":pipeline} 20 | result["data"] = await posts_collection.aggregate(pipeline).toArray() 21 | await mongoClient.close() 22 | return result 23 | } 24 | } -------------------------------------------------------------------------------- /04.atlas-search/labs/lab6.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require("mongodb"); 2 | const config = require("../config"); 3 | mongoClient = new MongoClient(config.atlasURI) 4 | module.exports = { 5 | queryStringSearch : async function(term){ 6 | 7 | await mongoClient.connect() 8 | let db = mongoClient.db("forum_db") 9 | let posts_collection = db.collection("posts") 10 | 11 | pipeline = [ 12 | { 13 | "$search":{ 14 | //TODO - query the data using the query string operator. This will allow users to write advanced logic into their searches. 
15 | // the user's search input is provided in the variable "term" 16 | 17 | 18 | 19 | } 20 | }, 21 | { 22 | "$limit":20 23 | } 24 | 25 | ] 26 | 27 | // return results as an array of json documents 28 | result = {"query": pipeline}; 29 | result["data"] = await posts_collection.aggregate(pipeline).toArray() 30 | await mongoClient.close() 31 | return result 32 | } 33 | } -------------------------------------------------------------------------------- /04.atlas-search/labs/lab7.js: -------------------------------------------------------------------------------- 1 | const { MongoClient } = require("mongodb"); 2 | const config = require("../config"); 3 | mongoClient = new MongoClient(config.atlasURI) 4 | module.exports = { 5 | facetSearch: async function () { 6 | 7 | await mongoClient.connect() 8 | let db = mongoClient.db("forum_db") 9 | let posts_collection = db.collection("posts") 10 | 11 | pipeline = [ 12 | { 13 | "$searchMeta": { 14 | //TODO - use facets to create buckets for reply_count and return the number of records in each bucket, as well as the number of documents for each user.full_name 15 | 16 | 17 | } 18 | 19 | }, 20 | { 21 | "$limit": 20 22 | } 23 | 24 | ] 25 | 26 | // return results as an array of json documents 27 | result = { "query": pipeline }; 28 | result["data"] = await posts_collection.aggregate(pipeline).toArray() 29 | await mongoClient.close() 30 | return result 31 | } 32 | } -------------------------------------------------------------------------------- /04.atlas-search/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "search-training-lab", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "start": "node ./bin/www", 7 | "test": "mocha" 8 | }, 9 | "dependencies": { 10 | "bootstrap": "^5.1.3", 11 | "cookie-parser": "~1.4.4", 12 | "debug": "~2.6.9", 13 | "express": "~4.16.1", 14 | "mongodb": "^4.2.2", 15 | "morgan": "~1.9.1", 16 | "nodemon": "^2.0.22" 17 | }, 18 | 
"devDependencies": { 19 | "mocha": "^9.1.4" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /04.atlas-search/public/assets/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/04.atlas-search/public/assets/favicon.ico -------------------------------------------------------------------------------- /04.atlas-search/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | MongoDB Atlas Search Lab 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 |
19 | 20 | 38 | 39 |
40 | 41 | 54 | 55 |
56 |

Forum Search Results

57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 67 | 70 | 73 | 76 | 77 |
65 | Title 66 | 68 | Author 69 | 71 | Replies 72 | 74 | Full Text 75 |
78 | 79 |
80 |
81 |
82 | 83 | 84 | 85 | 86 | 87 | 88 | -------------------------------------------------------------------------------- /04.atlas-search/public/javascripts/scripts.js: -------------------------------------------------------------------------------- 1 | function getLatestPosts(){ 2 | fetch("http://localhost:3000/recent").then((result) => { 3 | result.json().then((data) => { 4 | rebuildTable(data) 5 | }) 6 | }) 7 | } 8 | 9 | function runSearch(advanced){ 10 | // runs basic search 11 | searchData = { 12 | "searchText": document.getElementById("searchText").value, 13 | "searchTopicFilters": getTopicList(), 14 | "staffOnly": document.getElementById("staffResponseOnly").checked, 15 | "date": document.getElementById("inptDate").value, 16 | "advanced":advanced 17 | } 18 | 19 | fetch("http://localhost:3000/forumsearch",{ 20 | method:"POST", 21 | headers: { 22 | "Content-Type":"application/json" 23 | }, 24 | body: JSON.stringify(searchData) 25 | }).then((result) => { 26 | result.json().then((data) => { 27 | console.log(data) 28 | rebuildTable(data["data"]) 29 | }) 30 | }) 31 | } 32 | 33 | function runAdvancedSearch(){ 34 | // runs advanced search, query string should be typed into search bar 35 | } 36 | 37 | function rebuildTable(data){ 38 | let table = document.getElementById("resultTable"); 39 | while(table.hasChildNodes()){ 40 | table.removeChild(table.firstChild) 41 | } 42 | 43 | // build rows 44 | data.forEach((doc) => { 45 | let newRow = document.createElement("tr"); 46 | 47 | //title 48 | let newColTitle = document.createElement("td") 49 | newColTitleLink = document.createElement("a") 50 | newColTitleLink.setAttribute("href",doc.post_url) 51 | newColTitleLink.setAttribute("target","_blank") 52 | newColTitleLink.innerHTML = doc.post_title; 53 | newColTitle.appendChild(newColTitleLink) 54 | newRow.appendChild(newColTitle) 55 | 56 | //author 57 | let newColAuthor = document.createElement("td"); 58 | newColAuthor.innerHTML = doc.user.full_name 59 | 
newRow.appendChild(newColAuthor) 60 | 61 | //reply count 62 | let newColReplies = document.createElement("td"); 63 | newColReplies.innerHTML = doc.reply_count; 64 | newRow.appendChild(newColReplies); 65 | 66 | //post text 67 | let newColPostText = document.createElement("td"); 68 | newColPostText.innerHTML = doc.post_text_html; 69 | newRow.appendChild(newColPostText); 70 | 71 | table.appendChild(newRow) 72 | }) 73 | } 74 | 75 | function getTopicList(){ 76 | // returns list of sidebar topics with checkmark 77 | let topics = [] 78 | let topicCheckboxIDs = [ 79 | "replicaSet", 80 | "shardedCluster", 81 | "atlas", 82 | "community" 83 | ] 84 | 85 | topicCheckboxIDs.forEach((id) => { 86 | let checkbox = document.getElementById(id); 87 | if(checkbox.checked){ 88 | topics.push(checkbox.getAttribute("name")) 89 | } 90 | }) 91 | 92 | return topics 93 | } 94 | 95 | // Autocomplete 96 | function autocomplete(inp) { 97 | /*the autocomplete function takes two arguments, 98 | the text field element and an array of possible autocompleted values:*/ 99 | var currentFocus; 100 | /*execute a function when someone writes in the text field:*/ 101 | inp.addEventListener("input", async function(e) { 102 | var a, b, i, val = this.value; 103 | /*close any already open lists of autocompleted values*/ 104 | closeAllLists(); 105 | if (!val) { return false;} 106 | currentFocus = -1; 107 | /*create a DIV element that will contain the items (values):*/ 108 | a = document.createElement("DIV"); 109 | a.setAttribute("id", this.id + "autocomplete-list"); 110 | a.setAttribute("class", "autocomplete-items"); 111 | /*append the DIV element as a child of the autocomplete container:*/ 112 | this.parentNode.appendChild(a); 113 | arr = await getAutocompleteOptions(val) 114 | console.log(arr) 115 | /*for each item in the array...*/ 116 | for (i = 0; i < arr.length; i++) { 117 | /*check if the item starts with the same letters as the text field value:*/ 118 | if (true) { 119 | /*create a DIV element for each 
matching element:*/ 120 | b = document.createElement("DIV"); 121 | /*make the matching letters bold:*/ 122 | boldItem = arr[i].replace(val,"" + val + "") 123 | b.innerHTML = boldItem 124 | /*insert a input field that will hold the current array item's value:*/ 125 | b.innerHTML += ""; 126 | /*execute a function when someone clicks on the item value (DIV element):*/ 127 | b.addEventListener("click", function(e) { 128 | /*insert the value for the autocomplete text field:*/ 129 | inp.value = this.getElementsByTagName("input")[0].value; 130 | /*close the list of autocompleted values, 131 | (or any other open lists of autocompleted values:*/ 132 | closeAllLists(); 133 | }); 134 | a.appendChild(b); 135 | } 136 | } 137 | }); 138 | /*execute a function presses a key on the keyboard:*/ 139 | inp.addEventListener("keydown", function(e) { 140 | var x = document.getElementById(this.id + "autocomplete-list"); 141 | if (x) x = x.getElementsByTagName("div"); 142 | if (e.keyCode == 40) { 143 | /*If the arrow DOWN key is pressed, 144 | increase the currentFocus variable:*/ 145 | currentFocus++; 146 | /*and and make the current item more visible:*/ 147 | addActive(x); 148 | } else if (e.keyCode == 38) { //up 149 | /*If the arrow UP key is pressed, 150 | decrease the currentFocus variable:*/ 151 | currentFocus--; 152 | /*and and make the current item more visible:*/ 153 | addActive(x); 154 | } else if (e.keyCode == 13) { 155 | /*If the ENTER key is pressed, prevent the form from being submitted,*/ 156 | e.preventDefault(); 157 | if (currentFocus > -1) { 158 | /*and simulate a click on the "active" item:*/ 159 | if (x) x[currentFocus].click(); 160 | } 161 | } 162 | }); 163 | function addActive(x) { 164 | /*a function to classify an item as "active":*/ 165 | if (!x) return false; 166 | /*start by removing the "active" class on all items:*/ 167 | removeActive(x); 168 | if (currentFocus >= x.length) currentFocus = 0; 169 | if (currentFocus < 0) currentFocus = (x.length - 1); 170 | /*add 
class "autocomplete-active":*/ 171 | x[currentFocus].classList.add("autocomplete-active"); 172 | } 173 | function removeActive(x) { 174 | /*a function to remove the "active" class from all autocomplete items:*/ 175 | for (var i = 0; i < x.length; i++) { 176 | x[i].classList.remove("autocomplete-active"); 177 | } 178 | } 179 | function closeAllLists(elmnt) { 180 | /*close all autocomplete lists in the document, 181 | except the one passed as an argument:*/ 182 | var x = document.getElementsByClassName("autocomplete-items"); 183 | for (var i = 0; i < x.length; i++) { 184 | if (elmnt != x[i] && elmnt != inp) { 185 | x[i].parentNode.removeChild(x[i]); 186 | } 187 | } 188 | } 189 | /*execute a function when someone clicks in the document:*/ 190 | document.addEventListener("click", function (e) { 191 | closeAllLists(e.target); 192 | }); 193 | } 194 | 195 | async function getAutocompleteOptions(term){ 196 | searchData = { 197 | "searchText": term 198 | } 199 | 200 | fetchResult = await fetch("http://localhost:3000/autocomplete",{ 201 | method:"POST", 202 | headers: { 203 | "Content-Type":"application/json" 204 | }, 205 | body: JSON.stringify(searchData) 206 | }) 207 | fetchResultJson = await fetchResult.json() 208 | console.log(fetchResultJson) 209 | autocompleteList = [] 210 | fetchResultJson["data"].forEach((record) => { 211 | autocompleteList.push(record.post_title) 212 | }) 213 | return autocompleteList 214 | 215 | 216 | // fetch("http://localhost:3000/autocomplete",{ 217 | // method:"POST", 218 | // headers: { 219 | // "Content-Type":"application/json" 220 | // }, 221 | // body: JSON.stringify(searchData) 222 | // }).then((result) => { 223 | // result.json().then((data) => { 224 | // autocompleteList = [] 225 | // data.forEach((record) => { 226 | // autocompleteList.push(record.post_title) 227 | // }) 228 | // return autocompleteList 229 | // }) 230 | // }) 231 | } 232 | 233 | 234 | // Get facets 235 | function getFacets(){ 236 | 
fetch("http://localhost:3000/facets").then((result) => { 237 | result.json().then((data) => { 238 | rebuildSidebar(data) 239 | }) 240 | }) 241 | } 242 | 243 | function rebuildSidebar(data){ 244 | let facetHTML = document.getElementById("sidebarFacets"); 245 | facetHTML.innerHTML = ""; 246 | data["data"][0]["facet"]["reply_count_facet"]["buckets"].forEach((facet) => { 247 | facetHTML.appendChild(buildFacetItem(facet,"reply_count")); 248 | }) 249 | data["data"][0]["facet"]["username_facet"]["buckets"].forEach((facet) => { 250 | facetHTML.appendChild(buildFacetItem(facet,"user")); 251 | }) 252 | 253 | } 254 | 255 | function buildFacetItem(facet,facet_type){ 256 | console.log(facet) 257 | let newFacetItem = document.createElement("span"); 258 | newFacetItem.setAttribute("class","list-group-item list-group-item-light p-3") 259 | if(facet_type = "reply_count"){ 260 | if(typeof facet["_id"] == "number"){ 261 | newFacetItem.innerHTML = facet["_id"] + " - " + (facet["_id"] + 5) + " replies : " + facet["count"]; 262 | } else { 263 | newFacetItem.innerHTML = facet["_id"] + " replies: " + facet["count"]; 264 | 265 | } 266 | } 267 | 268 | if(facet_type == "user"){ 269 | newFacetItem.innerHTML = facet["_id"] + ": " + facet["count"]; 270 | } 271 | 272 | return newFacetItem 273 | } 274 | 275 | 276 | 277 | autocomplete(document.getElementById("searchText")) 278 | getLatestPosts() 279 | getFacets() -------------------------------------------------------------------------------- /04.atlas-search/public/stylesheets/custom.css: -------------------------------------------------------------------------------- 1 | #resultTable{ 2 | table-layout: fixed; 3 | } 4 | 5 | .resultColumn{ 6 | width:25%; 7 | overflow:auto; 8 | border: 1px solid black; 9 | } -------------------------------------------------------------------------------- /04.atlas-search/routes/index.js: -------------------------------------------------------------------------------- 1 | var express = require('express'); 2 | var 
router = express.Router(); 3 | 4 | const pbf = require("./prebuilt_functions"); 5 | 6 | 7 | 8 | //import labs 9 | const lab1 = require('../labs/lab1') 10 | const lab2 = require('../labs/lab2') 11 | const lab3 = require('../labs/lab3') 12 | const lab4 = require('../labs/lab4') 13 | const lab5 = require('../labs/lab5') 14 | const lab6 = require('../labs/lab6') 15 | const lab7 = require('../labs/lab7') 16 | 17 | /* GET home page. */ 18 | router.get('/', function (req, res, next) { 19 | res.render('index', { title: 'Express' }); 20 | }); 21 | 22 | // GET 20 recent posts 23 | router.get('/recent', async function (req, res, next) { 24 | posts = await pbf.getRecentPosts(); 25 | res.send(JSON.stringify(posts)); 26 | }) 27 | 28 | /* Lab 1 - Search Bar */ 29 | router.post('/forumsearch', async function (req, res) { 30 | try { 31 | let result; 32 | if (req.body.staffOnly) { 33 | result = await lab3.compoundSearch(req.body.searchText) 34 | } else if (req.body.searchText.indexOf("\"") > -1) { 35 | queryString = req.body.searchText 36 | queryString = queryString.replace("\"", "") 37 | queryString = queryString.replace("\"", "") 38 | console.log(queryString) 39 | result = await lab4.keywordSearch(queryString) 40 | } else if (req.body.date) { 41 | result = await lab5.nearSearch(req.body.searchText, new Date(req.body.date)) 42 | } else if (req.body.advanced) { 43 | result = await lab6.queryStringSearch(req.body.searchText) 44 | } 45 | else { 46 | result = await lab1.basicSearch(req.body.searchText) 47 | } 48 | console.log(req.body) 49 | res.send(JSON.stringify(result)) 50 | } catch (e) { 51 | console.log("It didn't work :(") 52 | console.log(e) 53 | } 54 | 55 | }) 56 | 57 | /* Lab 2 - Autocomplete */ 58 | router.post('/autocomplete', async function (req, res) { 59 | try { 60 | let result = await lab2.autocomplete(req.body.searchText) 61 | console.log(req.body) 62 | res.send(JSON.stringify(result)) 63 | } catch (e) { 64 | console.log("It didn't work :(") 65 | 
res.send(JSON.stringify({"error":"Autocomplete not functioning."})) 66 | } 67 | 68 | }) 69 | 70 | /* Lab 7 - Facets */ 71 | router.get('/facets', async function (req, res, next) { 72 | try { 73 | facets = await lab7.facetSearch() 74 | res.send(JSON.stringify(facets)); 75 | } catch (e) { 76 | console.log("It didn't work :(") 77 | console.log(e) 78 | } 79 | 80 | }) 81 | 82 | module.exports = router; 83 | -------------------------------------------------------------------------------- /04.atlas-search/routes/prebuilt_functions.js: -------------------------------------------------------------------------------- 1 | const config = require("../config"); 2 | const { MongoClient } = require("mongodb"); 3 | 4 | module.exports = { 5 | getRecentPosts: async function () { 6 | try { 7 | const client = new MongoClient(config.atlasURI); 8 | await client.connect(); 9 | const collection = client.db("forum_db").collection("posts"); 10 | let result = await collection.find().sort().limit(20).toArray() // sort & limit to 20 - may want to build index 11 | await client.close(); 12 | return result 13 | } catch (e) { 14 | console.log("It didn't work :(") 15 | console.log(e) 16 | } 17 | 18 | } 19 | } -------------------------------------------------------------------------------- /04.atlas-search/routes/users.js: -------------------------------------------------------------------------------- 1 | var express = require('express'); 2 | var router = express.Router(); 3 | 4 | /* GET users listing. 
*/ 5 | router.get('/', function(req, res, next) { 6 | res.send('respond with a resource'); 7 | }); 8 | 9 | module.exports = router; 10 | -------------------------------------------------------------------------------- /04.atlas-search/sample_config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "atlasURI":"" 3 | } -------------------------------------------------------------------------------- /04.atlas-search/test/lab1.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert") 2 | const lab1 = require("../labs/lab1") 3 | 4 | describe('lab1', function () { 5 | it('Should return post 3983 (fuzzy) or post 5105 (synonyms)', async function () { 6 | let fuzzy = await lab1.basicSearch("reeplica sat") 7 | let synonyms = await lab1.basicSearch("crustacean") 8 | 9 | let passed = false 10 | if (fuzzy["data"].length > 0) { 11 | console.log("Lab 1: fuzzy search detected") 12 | if (fuzzy["data"][0].post_id == 3983) { 13 | 14 | passed = true 15 | } 16 | } 17 | 18 | if (synonyms["data"].length > 0) { 19 | console.log("Lab 1: synonym search detected") 20 | if (synonyms["data"][0].post_id == 5105) { 21 | passed = true 22 | } 23 | } 24 | 25 | 26 | 27 | assert.equal(passed, true) 28 | }) 29 | }) -------------------------------------------------------------------------------- /04.atlas-search/test/lab2.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert") 2 | const lab2 = require("../labs/lab2") 3 | 4 | describe('lab2', function(){ 5 | it('Should return \"Removing and re-adding a replica set member\"', async function(){ 6 | let results = await lab2.autocomplete("Replic") 7 | assert.equal(results["data"][0].post_title, "Chapter 2 replication:Lab - initiate a replica set locally") 8 | await lab2.closeLab2Client() 9 | }) 10 | 11 | }) 
-------------------------------------------------------------------------------- /04.atlas-search/test/lab3.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert") 2 | const lab3 = require("../labs/lab3") 3 | 4 | describe('lab3', function(){ 5 | it('Should return \"[Fixed] Lab: Deploy a Replica Set\"', async function(){ 6 | let results = await lab3.compoundSearch("test") 7 | //console.log(results) 8 | assert.equal(results["data"][0].post_title, "[Fixed] Lab: Deploy a Replica Set") 9 | }) 10 | 11 | }) -------------------------------------------------------------------------------- /04.atlas-search/test/lab4.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert") 2 | const lab4 = require("../labs/lab4") 3 | 4 | describe('lab4', function(){ 5 | it('Should return post id 204499', async function(){ 6 | let results = await lab4.keywordSearch("How to add a modifier to a nested document with mongodb") 7 | assert.equal(results["data"][0].post_id, 204499) 8 | }) 9 | }) -------------------------------------------------------------------------------- /04.atlas-search/test/lab5.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert") 2 | const lab5 = require("../labs/lab5") 3 | 4 | describe('lab5', function(){ 5 | it('Should return post 189187', async function(){ 6 | let results = await lab5.nearSearch("shard", new Date("2020-07-04")) 7 | assert.equal(results["data"][0].post_id, 189187) 8 | }) 9 | 10 | }) -------------------------------------------------------------------------------- /04.atlas-search/test/lab6.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert") 2 | const lab6 = require("../labs/lab6") 3 | 4 | describe('lab6', function(){ 5 | it('Should return post 35743', async function(){ 6 | let results = await 
lab6.queryStringSearch("post_title:community AND post_text:(student OR university)") 7 | //console.log(results["data"][0]) 8 | assert.equal(results["data"][0].post_id, 35743) 9 | }) 10 | 11 | }) -------------------------------------------------------------------------------- /04.atlas-search/test/lab7.js: -------------------------------------------------------------------------------- 1 | const assert = require("assert") 2 | const lab7 = require("../labs/lab7") 3 | 4 | describe('lab7', function(){ 5 | it('Should return 5 buckets for reply count and the first username should be Jon Streets', async function(){ 6 | let results = await lab7.facetSearch(); 7 | assert.equal(results["data"][0]["facet"]["reply_count_facet"]["buckets"].length, 5) 8 | assert.equal(results["data"][0]["facet"]["username_facet"]["buckets"][0]["_id"], "Jon Streets") 9 | }) 10 | 11 | }) -------------------------------------------------------------------------------- /05.atlas-serverless/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (https://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # Typescript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | 
.eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | # next.js build output 61 | .next 62 | 63 | config.js 64 | data/dump 65 | package-lock.json 66 | .vscode/ -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Data API(RESTful API) & Functions 4 | 5 | ### [→ Create App Service](#create-app-service) 6 | 7 | ### [→ Data API](#data-api) 8 | 9 | ### [→ Functions](#functions) 10 | 11 | ### [→ Change-Up](#change-up) 12 | 13 |
14 | 15 | # Create App Service 16 | 17 | - [App 시작](#app-시작) 18 | - [App 환경 설정](#app-환경-설정-optional) 19 | - [인증 활성화](#authentication) 20 | - [Rule 변경](#authorization) 21 | 22 | > 이미 App Service를 생성한 경우, 이 과정은 건너뛰고 [Data API](#data-api)부터 시작 23 | >   24 | 25 | ## App 시작 26 | 27 | ![start](img-app/01.app-template.png) 28 | 29 | - Atlas UI 상단의 `App Services`탭 진입 후 30 | - `Build your own App` 선택 31 | - `Next` 클릭 32 | > Atlas UI 좌측 `Data API` 메뉴에서 시작할 수도 있다 33 | 34 |
35 | 36 | ![select](img-app/02.app-sel.png) 37 | 38 | - App Service의 데이터 소스(클러스터) 지정 39 | - Application 이름 40 | - CP와 리전 결정 후 41 | > 현재, App Service에 한국 리전은 제공되지 않는다 42 | > Global region: 서비스 설정은 선택한 리전에 저장되지만, request는 요청한 클라이언트의 인접 리전에서 서비스 43 | > Single region: 모든 request는 지정한 리전에서만 처리 44 | - `Create App Service` 클릭 45 | - `Welcome to your Application Guides`가 보인다면 그냥 무시(`Close Guides`) 46 | 47 |
48 | 49 | ![dashboard](img-app/03.app-dash.png) 50 | 51 | - App dashboard에서 `App ID`를 확인할 수 있다 (Data API endpoint에 사용) 52 | 53 | ## App 환경 설정 (optional) 54 | 55 | > 새로 생성된 App은 환경이 설정돼 있지 않다 56 | > App의 성격에 따라 다른 환경 변수들을 설정하고 개발에 이용할 수 있다 57 | > 58 | > - Testing 59 | > - Development 60 | > - QA 61 | > - Production 62 | >   63 | >   64 | 65 | - `NO ENVIRONMENT` 클릭 66 | 67 |
68 | 69 | ![env](img-app/04.env.png) 70 | 71 | - 환경 설정 후 (eg. `Testing`) 72 | - `Save Draft` 클릭 73 | - `Understanding Save and Deploy` 모달에서 `Next` 클릭 74 | - `Got it` 클릭 75 | 76 |
77 | 78 | ![env-rev-deploy](img-app/05.env-rev-deploy.png) 79 | 80 | - `REVIEW DRAFT & DEPLOY` 클릭 81 | - `Deployment Draft` 모달에서 `Deploy` 클릭 82 | 83 |
84 | 85 | ![env-deployed](img-app/06.env-deployed.png) 86 | 87 | - 상단의 `Deployment was successful!` 배너와 88 | - 좌측 Apps 메뉴에서 `TEST` 환경 변경됨을 확인 89 | 90 | ## Authentication 91 | 92 | ![api-key](img-data/06.auth-apikey.png) 93 | 94 | - 좌측 `Authentication` 메뉴 진입 후 95 | - 테스트에 가장 만만한 `API Keys`의 `EDIT` 버튼 클릭하여 96 | - `Provider Enabled` 활성화 97 | - `Save Draft` 클릭 98 | 99 |
100 | 101 | ![api-key-deploy](img-data/06.auth-apikey-deploy.png) 102 | 103 | - 상단의 `REVIEW DRAFT & DEPLOY` 클릭 104 | - 이후 나타난 `Deployment Draft` 모달에서 `Deploy` 클릭 105 | - 상단에 `Deployment was successful!` 확인 후 `Create API Key` 클릭 106 | 107 |
108 | 109 | ![api-key-create](img-data/06.auth-apikey-create.png) 110 | 111 | - `Add New API Key` 모달에서 `Api Key Name` 지정 (eg. `keyedUser`) 112 | - `Create` 클릭 113 | 114 |
115 | 116 | ![api-key-save](img-data/06.auth-apikey-save.png) 117 | 118 | - API Key는 다시 확인할 수가 없기 때문에 반드시 생성 후 암기해야 한다 119 | - 혹은, 가끔 암기력이 떨어지는 사람은 복사하여 안전한 장소에 저장하고는 그 사실을 완전히 잊어버리고 항상 새 키를 생성하기도 한다 (본인이면 손~) 120 | 121 | ## Authorization 122 | 123 | > 셋업 시 모든 데이터 소스를 연결했지만 기본 authorization은 모두 `denyAllAccess`이기 때문에 124 | > I/O를 허용하려면 collection 별로 `Rule` 변경이 필요하다 125 | >
126 | 127 |
128 | 129 | ![rule](img-data/04.rule.png) 130 | 131 | - 좌측 `Rules` 메뉴 진입 후 132 | - `sameple_mflix.movies` collection 선택 133 | - 기본 `denyAllAccess`를 `readAll`로 변경 후 134 | - `Add preset role` 클릭 135 | 136 |
137 | 138 | ![rule-deploy](img-data/05.rule-deploy.png) 139 | 140 | - Rule이 `0.readAll` 로 변경 확인 후 141 | - 상단의 `REVIEW DRAFT & DEPLOY` 클릭 후 142 | - 활성화 된 `Deployment Draft` 모달에서 `Deploy` 클릭 후 143 | - 잠시 후 상단에서 `Deployment was successful` 확인 144 | 145 | # Data API 146 | 147 | > **Requisites** 148 | > 149 | > > [[App Service]](#create-app-service) 150 | > > [[Postman]](https://www.postman.com/downloads/) 151 | > 152 | > 테스트는 Bearer token 방식 인증을 사용하지않기 때문에 [[Postman web]](https://web.postman.co) 버전을 사용할 수 없다 153 | > 반드시 로컬 설치버전 필요!! 154 | >
155 | 156 |
157 | 158 | - [Data API 시작](#enable-data-api) 159 | - [Postman 셋업](#postman) 160 | - [테스트](#run-data-api) 161 | 162 |
163 | 164 | > [[Data API 공식 매뉴얼]](https://www.mongodb.com/docs/atlas/app-services/data-api/) 165 | 166 | ## Enable Data API 167 | 168 |
169 | 170 | ![start](img-data/01.start.png) 171 | 172 | - 좌측 Apps 내비게이션 패널에서 `HTTPS Endpoints` 메뉴 진입 173 | - 상단 `Data API` 탭 선택 후 174 | - 아래 `Enable the Data API` 클릭 175 | 176 |
177 | 178 | ![config](img-data/02.config.png) 179 | 180 | - Endpoint URL가 단일 리전용 `https://.aws.data.mongodb-api.com/app//endpoint`임을 확인 181 | - Leave all settings unchanged but 182 | - Check `Create User Upon Authentication` 183 | - `Save` 클릭 후 184 | - `REVIEW DRAFT & DEPLOY` 클릭 185 | 186 |
187 | 188 | ![deploy](img-data/03.deploy.png) 189 | 190 | - 검토 후 `Deploy` 클릭 191 | - 상단의 `Deployment was successful!` 배너 확인 192 | 193 | ## Postman 194 | 195 |
196 | 197 | ![pm new ws](img-fn/08.pm-new-ws.png) 198 | 199 | - Postman에서 새로운 워크스페이스 생성 200 | 201 |
202 | 203 | ![pm import](img-fn/09.pm-import.png) 204 | 205 | - 새로 생성된 워크스페이스에서 `Import` 클릭 206 | 207 |
208 | 209 | ![pm folder](img-fn/10.pm-folder.png) 210 | 211 | - 제공된 Atlas Training repo의 `postman` 폴더 오픈 212 | 213 |
214 | 215 | ![pm folder](img-fn/11.pm-import-2.png) 216 | 217 | - 제공된 Collections, Global env 모두 선택 218 | - `Import` 클릭 219 | 220 |
221 | 222 | ![pm env](img-fn/12.pm-env.png) 223 | 224 | - Import된 설정에서 `Environment` - `Globals` 환경변수 중 225 | `appId`, `region`, `apiKey` 가 현재 App Service의 설정 값과 동일한지 확인 226 | 227 | ## Run Data API 228 | 229 |
230 | 231 | ![run req](img-data/08.req.png) 232 | 233 | - `auth-apiKey` collection의 `findOne` request를 실행(`Send`) 후 결과 확인 234 | - 다른 두 requests (`find`, `aggregate`)도 실행 후 결과를 확인한다 235 | 236 | # Functions 237 | 238 |
239 | 240 | - [Custom HTTPs Endpoints 설정](#https-endpoints) 241 | - [Function 정의](#new-funtion) 242 | - [Function 테스트](#query-custom-endpoint) 243 | 244 |
245 | 246 | > [[Data API 공식 매뉴얼]](https://www.mongodb.com/docs/atlas/app-services/data-api/custom-endpoints/) 247 | > [[Functions 공식 매뉴얼]](https://www.mongodb.com/docs/atlas/app-services/functions/) 248 | 249 | ## HTTPs Endpoints 250 | 251 | ![add endpoint](img-fn/01.add-hep.png) 252 | 253 | - 좌측 내비게이션 메뉴의 `HTTPS Endpoints` 진입 후 254 | - `Add An Endpoint` 클릭 255 | 256 |
257 | 258 | #### ![endpoint url](img-fn/02.add-hep-url.png) 259 | 260 | - `Route`로 `/genre` 지정 261 | - Endpoint가 Route를 포함한 단일 리전 URL임을 확인 262 | `https://.aws.data.mongodb-api.com/app//endpoint/` 263 | - `HTTP Method`로 Data API와 동일하게 `POST` 사용 264 | - `Respond With Result`를 켠다 265 | 266 |
267 | 268 | #### ![new funtion](img-fn/03.new-fn.png) 269 | 270 | - `+ New Function` 선택 271 | > App Service 전체 메뉴 `Functions`에서 임의의 함수를 추가할 수도 있지만 272 | > Custom HTTPs endpoint의 경우 https function template이 제공된다 273 | - `Function Name`을 `getMoviesByGenre`로 지정 후 274 | - `Function` body를 아래 코드로 교체한다 275 | 276 | ``` 277 | // This function is the endpoint's request handler. 278 | exports = async function({ query, headers, body}, response) { 279 | let {genre, limit} = query; 280 | 281 | if (limit === undefined) { 282 | limit = 5; 283 | } else { 284 | limit = parseInt(limit) 285 | } 286 | 287 | const contentTypes = headers["Content-Type"]; 288 | const reqBody = body; 289 | 290 | console.log("genre, limit: ", genre, limit); 291 | console.log("Content-Type:", JSON.stringify(contentTypes)); 292 | console.log("Request body:", reqBody); 293 | 294 | const doc = await context.services.get("mongodb-atlas") 295 | .db("sample_mflix").collection("movies") 296 | .find( 297 | { 298 | genres: {$in: [genre]} 299 | } 300 | ).limit(limit).toArray(); 301 | const res = { 302 | nMovies: doc.length, 303 | movies: doc 304 | } 305 | 306 | return res; 307 | }; 308 | ``` 309 | 310 |
311 | 312 | ![set param](img-fn/04.new-fn-param.png) 313 | 314 | - 오른쪽 아래 화살표 버튼을 클릭해서 `Testing Console`을 활성화 시킨 후 315 | - 테스트를 위한 query param을 세팅하고 316 | - `Run` 클릭 317 | 318 | > Testing console을 이용한 테스트 시 [[Example]](https://www.mongodb.com/docs/atlas/app-services/data-api/custom-endpoints/#example) 처럼 `response.setBody()`를 이용해 return대신 직접 body에 결과를 처리하려면 `HTTPResponse` instance를 function에 argument로 넘겨줘야 한다 319 | > **예** 320 | > 321 | > ``` 322 | > exports( 323 | > { 324 | > query { 325 | > genre: 'Drama', limit: 1 326 | > }, 327 | > headers: {'Content-Type': 'application/json'} 328 | > }, 329 | > new HTTPResponse() 330 | > ) 331 | > ``` 332 | > 333 | > request body를 Testing console에서 export하려면 아래 예제처럼 [`BSON.Binary.fromBase64`](https://www.mongodb.com/docs/atlas/app-services/functions/globals/#mongodb-method-BSON.Binary.fromBase64)를 이용할 수 있다 334 | > 335 | > ``` 336 | > exports( 337 | > { 338 | > query { 339 | > genre: 'Drama', limit: 1 340 | > }, 341 | > headers: {'Content-Type': 'application/json'}, 342 | > body: BSON.Binary.fromBase64() 343 | > }, 344 | > new HTTPResponse() 345 | > ) 346 | > ``` 347 | > 348 | > 단, Testing console은 `btoa()`를 지원하지 않기 때문에 콘솔에서 직접 base64 string으로 전환할 수가 없기 때문에 ``는 외부에서 변환해야 한다 349 | > 350 | > **예** 351 | > 352 | > `btoa(JSON.stringify()) + "="` 353 | > 354 | > > `"="`: `BSON.Binary`에서 요구하는 padding 값 355 | 356 |
357 | 358 | ![fn res](img-fn/05.new-fn-res.png) 359 | 360 | - `Result` 탭에서 기대했던 결과 확인 361 | 362 |
363 | 364 | ![save](img-fn/06.save.png) 365 | 366 | - `Fetch Custom User Data`와 367 | - `Create User Upon Authentication`을 enable 시킨 후 368 | - `Save Draft` 클릭 369 | 370 |
371 | 372 | - 상단 `REVIEW DRAFT & DEPLOY` 배너 버튼을 클릭 후 373 | 374 |
375 | 376 | - `Deployment Draft` 모달에서 최종 검토를 하고 377 | - 모달 오른쪽 아래 `Deploy` 버튼 클릭 378 | 379 |
380 | 381 | - 상단에 `Deployment was successful!` 배너가 나오면 성공 382 | 383 | ## Query Custom Endpoint 384 | 385 | ![query custom https endpoint](img-fn/07.query-ep.png) 386 | 387 | - Postman Collection `customHTTPsEndpoint`의 `genre` request를 실행한다 388 | - query param, `genre`, `limit` 을 변경해서 검색 조건을 변경할 수 있다 389 | 390 |
392 | # Change Up 393 | 394 | - [User ID Authentication Method](#auth-user-id) 395 | - [Unauthorized Request Test](#unauthorized-request) 396 | - [Rules Filters](#filters) 397 | 398 |
399 | 400 | ## Auth User Id 401 | 402 | 지금까지의 테스트는 `apiKey`를 사용한 사용자 인증을 사용해왔다 403 | DATA API/HTTPs Endpoint는 기본 application authentication 외에 404 | 모듈이 자체적으로 사용하는 사용자 인증 방법이 지원된다 405 | 406 | - User Id 407 | - Script 408 | 409 | 이번 테스트에서는 클라이언트가 사용자 인증을 하지 않고 410 | DATA API가 모든 query에 대해 사용자를 지정하는 인증 `User Id` 인증 방식을 테스트 해본다 411 | 412 |
413 | 414 | ![no apiKey](img-cu/02.auth-uid-no-apikey.png) 415 | 416 | - Postman `unauth-apiKey` collection에서 임의의 query를 수행한다 417 | > 해당 collection의 모든 query는 HTTP header에 인증을 위한 `apiKey`를 포함하지 않는다 418 | - 결과는 `no authentication methods were specified` 임을 확인할 수 있다 419 | 420 |
421 | 422 | ![userId select](img-cu/02.auth-uid-select.png) 423 | 424 | - App Service 좌측 내비게이션 바에서 `HTTPS Endpoints`를 선택 후 425 | - `Authentication Options`에서 `User Id`를 선택한다 426 | - 아래 `Select User` 클릭 427 | 428 |
429 | 430 | ![select user](img-cu/02.auth-uid-select-user.png) 431 | 432 | - 모달에서 등록돼 있는 사용자를 지정 후 433 | - `Select User` 클릭 434 | 435 |
436 | 437 | ![uid auth save](img-cu/02.auth-uid-save.png) 438 | 439 | - 돌아온 설정에서 선택한 사용자(예. `keyedUser`)가 올바로 선택됐음을 확인 후 440 | - `Save` 클릭 441 | 442 |
443 | 444 | ![uid auth ok](img-cu/02.auth-uid-ok.png) 445 | 446 | - 다시 Postman에서 동일한 query를 실행하면 인증이 성공해서 쿼리 결과를 확인할 수 있다 447 | - 남은 모든 query도 인증 문제없이 수행됨을 확인하다 448 | 449 | ## Unauthorized Request 450 | 451 | ![users rule](img-cu/01.unahor-req-rule.png) 452 | 453 | > App Service를 통한 DB query는 Rules의 Role에 기반한 RBAC로 제어된다 454 | > `sample_mflix` DB에서 `movies` collection외에는 Rule을 따로 지정하지 않았다 455 | > 즉, 다른 collection들은 모두 기본 Role인 `denyAllAccess`로 남아 있다 456 | > 이 상태에서 query request를 하게되면... 457 | 458 | ![users rule](img-cu/01.unauthor-req-fail.png) 459 | 460 | - Postman `auth-apiKey` collection에서 `find_Unauthorized` 를 실행하면 461 | - apiKey를 사용해 인증은 성공하지만 `no rule exists` 에러를 확인할 수 있다 462 | > query는 `users` collection 대상의 request 463 | 464 | ## Filters 465 | 466 | App Service의 Rules는 Role에 기반한 collection 수준의 접근제어와 함계 467 | Filter를 이용한 정밀한 document 접근 제어와 document의 field 접근제어가 가능하다 468 | 469 |
470 | 471 | ![filter start](img-cu/03.filter-start.png) 472 | 473 | - App Service 좌측 내비메뉴에서 `DATA ACCESS` 아래 `Rules` 진입 474 | - `sample_mflix.movies` 선택 475 | - `Filter` 탭 선택 후 476 | - `Add a filter` 클릭 477 | 478 |
479 | 480 | ![filter config](img-cu/03.filter-config.png) 481 | 482 | - `Filter name` 지정 (eg. `familyMovies`) 483 | - `Apply When` 지정 484 | ``` 485 | { 486 | "%%user.custom_data.age": { 487 | "$exists": false 488 | } 489 | } 490 | ``` 491 | > Filter 적용 조건을 정한다 492 | > `{}`: always true (alway apply) 493 | > 494 | > 테스트에서 `custom_data`를 설정하지 않았기 때문에 조건은 항상 true 495 | > [[Rule Expressions 참조]](https://www.mongodb.com/docs/atlas/app-services/rules/expressions/#rule-expressions) 496 | - `Query` 입력 497 | ``` 498 | { 499 | "rated": {"$ne": "R"} 500 | } 501 | ``` 502 | > `find` 조회 query에서 사용하는 filter pattern 사용 503 | > `{}`: 모든 documents 504 | > 505 | > 성인 등급 영상을 제외하는 조건 506 | - `Projection` 입력 507 | ``` 508 | { 509 | "_id": 0, 510 | "title": 1, 511 | "plot": 1, 512 | "imdb": 1, 513 | "genres": 1 514 | } 515 | ``` 516 | - `Save Draft` 클릭 517 | - 상단의 `REVIEW DRAFT & DEPLOY` 클릭 518 | 519 |
520 | 521 | - `Deploy` 클릭 522 | 523 |
524 | 525 | - 상단에 `Deployment was successful!` 확인되면 성공 526 | 527 |
528 | 529 | ![filter query run](img-cu/03.filter-query.png) 530 | 531 | - Postman `auth-apiKey` collection의 `findNoFilter` 수행 532 | > query는 `"filter"` 필드를 지정하지 않았지만 533 | - 결과는 Rules의 Filter에서 지정한 `Projection`에 따라 한정된 필드만을 반환한다 534 | 535 |
536 | 537 | ![filter query R](img-cu/03.filter-nodocs.png) 538 | 539 | - Postman에서 `findRratedMovies` query 수행 540 | - query는 `"R"` 등급의 모든 영화를 찾으려 하지만 App Service Filter에 의해 모두 걸러진 후이기 때문에 검색되는 document가 없다 541 | -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-app/01.app-template.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-app/01.app-template.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-app/02.app-sel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-app/02.app-sel.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-app/03.app-dash.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-app/03.app-dash.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-app/04.env.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-app/04.env.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-app/05.env-rev-deploy.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-app/05.env-rev-deploy.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-app/06.env-deployed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-app/06.env-deployed.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/01.unahor-req-rule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/01.unahor-req-rule.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/01.unauthor-req-fail.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/01.unauthor-req-fail.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-no-apikey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-no-apikey.png -------------------------------------------------------------------------------- 
/05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-ok.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-ok.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-save.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-select-user.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-select-user.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-select.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/02.auth-uid-select.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/03.filter-config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/03.filter-config.png 
-------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/03.filter-nodocs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/03.filter-nodocs.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/03.filter-query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/03.filter-query.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-cu/03.filter-start.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-cu/03.filter-start.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/01.start.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/01.start.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/02.config.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/02.config.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/03.deploy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/03.deploy.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/04.rule.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/04.rule.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/05.rule-deploy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/05.rule-deploy.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey-create.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey-create.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey-deploy.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey-deploy.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey-save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey-save.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/06.auth-apikey.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/07.var-apikey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/07.var-apikey.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-data/08.req.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-data/08.req.png -------------------------------------------------------------------------------- 
/05.atlas-serverless/01.dataapi-function/img-fn/01.add-hep.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/01.add-hep.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/02.add-hep-url.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/02.add-hep-url.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/03.new-fn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/03.new-fn.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/04.new-fn-param.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/04.new-fn-param.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/05.new-fn-res.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/05.new-fn-res.png 
-------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/06.save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/06.save.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/07.query-ep.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/07.query-ep.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/08.pm-new-ws.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/08.pm-new-ws.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/09.pm-import.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/09.pm-import.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/10.pm-folder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/10.pm-folder.png 
-------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/11.pm-import-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/11.pm-import-2.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/img-fn/12.pm-env.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/01.dataapi-function/img-fn/12.pm-env.png -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/postman/auth-apiKey.postman_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "info": { 3 | "_postman_id": "236b1490-16ec-42ce-8c71-df488185c91f", 4 | "name": "auth-apiKey", 5 | "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", 6 | "_exporter_id": "1454403" 7 | }, 8 | "item": [ 9 | { 10 | "name": "findOne", 11 | "request": { 12 | "method": "POST", 13 | "header": [ 14 | { 15 | "key": "apiKey", 16 | "value": "{{apiKey}}", 17 | "type": "text" 18 | } 19 | ], 20 | "body": { 21 | "mode": "raw", 22 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"filter\": {\n \"title\": \"The Matrix\"\n }\n}", 23 | "options": { 24 | "raw": { 25 | "language": "json" 26 | } 27 | } 28 | }, 29 | "url": { 30 | "raw": "{{baseUrl}}/data/v1/action/findOne", 31 | "host": [ 32 | "{{baseUrl}}" 33 | ], 34 | "path": [ 35 | "data", 36 | "v1", 37 | "action", 38 | "findOne" 39 | ] 40 | } 41 | }, 42 | "response": [] 43 | }, 44 | { 45 | 
"name": "find_Unauthorized", 46 | "request": { 47 | "method": "POST", 48 | "header": [ 49 | { 50 | "key": "apiKey", 51 | "value": "{{apiKey}}", 52 | "type": "text" 53 | } 54 | ], 55 | "body": { 56 | "mode": "raw", 57 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"users\",\n \"limit\": 10\n}", 58 | "options": { 59 | "raw": { 60 | "language": "json" 61 | } 62 | } 63 | }, 64 | "url": { 65 | "raw": "{{baseUrl}}/data/v1/action/find", 66 | "host": [ 67 | "{{baseUrl}}" 68 | ], 69 | "path": [ 70 | "data", 71 | "v1", 72 | "action", 73 | "find" 74 | ] 75 | } 76 | }, 77 | "response": [] 78 | }, 79 | { 80 | "name": "find", 81 | "request": { 82 | "method": "POST", 83 | "header": [ 84 | { 85 | "key": "apiKey", 86 | "value": "{{apiKey}}", 87 | "type": "text" 88 | } 89 | ], 90 | "body": { 91 | "mode": "raw", 92 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"filter\": {\n \"type\": \"series\"\n },\n \"sort\": {\"year\": 1},\n \"limit\": 2\n}", 93 | "options": { 94 | "raw": { 95 | "language": "json" 96 | } 97 | } 98 | }, 99 | "url": { 100 | "raw": "{{baseUrl}}/data/v1/action/find", 101 | "host": [ 102 | "{{baseUrl}}" 103 | ], 104 | "path": [ 105 | "data", 106 | "v1", 107 | "action", 108 | "find" 109 | ] 110 | } 111 | }, 112 | "response": [] 113 | }, 114 | { 115 | "name": "findRratedMovies", 116 | "request": { 117 | "method": "POST", 118 | "header": [ 119 | { 120 | "key": "apiKey", 121 | "value": "{{apiKey}}", 122 | "type": "text" 123 | } 124 | ], 125 | "body": { 126 | "mode": "raw", 127 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"filter\": {\n \"rated\": \"R\"\n }\n}", 128 | "options": { 129 | "raw": { 130 | "language": "json" 131 | } 132 | } 133 | }, 134 | "url": { 135 | "raw": "{{baseUrl}}/data/v1/action/find", 136 | "host": [ 137 | "{{baseUrl}}" 138 | ], 139 | 
"path": [ 140 | "data", 141 | "v1", 142 | "action", 143 | "find" 144 | ] 145 | } 146 | }, 147 | "response": [] 148 | }, 149 | { 150 | "name": "findNoFilter", 151 | "request": { 152 | "method": "POST", 153 | "header": [ 154 | { 155 | "key": "apiKey", 156 | "value": "{{apiKey}}", 157 | "type": "text" 158 | } 159 | ], 160 | "body": { 161 | "mode": "raw", 162 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"limit\": 10\n}", 163 | "options": { 164 | "raw": { 165 | "language": "json" 166 | } 167 | } 168 | }, 169 | "url": { 170 | "raw": "{{baseUrl}}/data/v1/action/find", 171 | "host": [ 172 | "{{baseUrl}}" 173 | ], 174 | "path": [ 175 | "data", 176 | "v1", 177 | "action", 178 | "find" 179 | ] 180 | } 181 | }, 182 | "response": [] 183 | }, 184 | { 185 | "name": "aggregate", 186 | "request": { 187 | "method": "POST", 188 | "header": [ 189 | { 190 | "key": "apiKey", 191 | "value": "{{apiKey}}", 192 | "type": "text" 193 | } 194 | ], 195 | "body": { 196 | "mode": "raw", 197 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"pipeline\": [\n {\n \"$group\": {\n \"_id\": \"$year\",\n \"count\": {\"$sum\": 1}\n }\n }, {\n \"$sort\": {\"count\": -1}\n }\n ]\n}", 198 | "options": { 199 | "raw": { 200 | "language": "json" 201 | } 202 | } 203 | }, 204 | "url": { 205 | "raw": "{{baseUrl}}/data/v1/action/aggregate", 206 | "host": [ 207 | "{{baseUrl}}" 208 | ], 209 | "path": [ 210 | "data", 211 | "v1", 212 | "action", 213 | "aggregate" 214 | ] 215 | } 216 | }, 217 | "response": [] 218 | } 219 | ], 220 | "event": [ 221 | { 222 | "listen": "prerequest", 223 | "script": { 224 | "type": "text/javascript", 225 | "exec": [ 226 | "" 227 | ] 228 | } 229 | }, 230 | { 231 | "listen": "test", 232 | "script": { 233 | "type": "text/javascript", 234 | "exec": [ 235 | "" 236 | ] 237 | } 238 | } 239 | ] 240 | } 
-------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/postman/customHTTPsEndpoint.postman_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "info": { 3 | "_postman_id": "cadcf712-7a54-489d-84b5-2dd43996c103", 4 | "name": "customHTTPsEndpoint", 5 | "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", 6 | "_exporter_id": "1454403" 7 | }, 8 | "item": [ 9 | { 10 | "name": "genre", 11 | "request": { 12 | "method": "POST", 13 | "header": [ 14 | { 15 | "key": "apiKey", 16 | "value": "{{apiKey}}", 17 | "type": "text" 18 | } 19 | ], 20 | "body": { 21 | "mode": "raw", 22 | "raw": "", 23 | "options": { 24 | "raw": { 25 | "language": "json" 26 | } 27 | } 28 | }, 29 | "url": { 30 | "raw": "{{baseUrl}}/genre?genre=Drama&limit=1", 31 | "host": [ 32 | "{{baseUrl}}" 33 | ], 34 | "path": [ 35 | "genre" 36 | ], 37 | "query": [ 38 | { 39 | "key": "genre", 40 | "value": "Drama" 41 | }, 42 | { 43 | "key": "limit", 44 | "value": "1" 45 | } 46 | ] 47 | } 48 | }, 49 | "response": [] 50 | } 51 | ] 52 | } -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/postman/unauth-apiKey.postman_collection.json: -------------------------------------------------------------------------------- 1 | { 2 | "info": { 3 | "_postman_id": "dc054235-0a3f-47ee-b3ac-cba063703176", 4 | "name": "unauth-apiKey", 5 | "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json", 6 | "_exporter_id": "1454403" 7 | }, 8 | "item": [ 9 | { 10 | "name": "findOne", 11 | "request": { 12 | "method": "POST", 13 | "header": [], 14 | "body": { 15 | "mode": "raw", 16 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"filter\": {\n \"title\": \"The Matrix\"\n }\n}", 17 | "options": { 18 | "raw": { 19 | "language": 
"json" 20 | } 21 | } 22 | }, 23 | "url": { 24 | "raw": "{{baseUrl}}/data/v1/action/findOne", 25 | "host": [ 26 | "{{baseUrl}}" 27 | ], 28 | "path": [ 29 | "data", 30 | "v1", 31 | "action", 32 | "findOne" 33 | ] 34 | } 35 | }, 36 | "response": [] 37 | }, 38 | { 39 | "name": "find_Unauthorized", 40 | "request": { 41 | "method": "POST", 42 | "header": [], 43 | "body": { 44 | "mode": "raw", 45 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"users\",\n \"limit\": 10\n}", 46 | "options": { 47 | "raw": { 48 | "language": "json" 49 | } 50 | } 51 | }, 52 | "url": { 53 | "raw": "{{baseUrl}}/data/v1/action/find", 54 | "host": [ 55 | "{{baseUrl}}" 56 | ], 57 | "path": [ 58 | "data", 59 | "v1", 60 | "action", 61 | "find" 62 | ] 63 | } 64 | }, 65 | "response": [] 66 | }, 67 | { 68 | "name": "find", 69 | "request": { 70 | "method": "POST", 71 | "header": [], 72 | "body": { 73 | "mode": "raw", 74 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"filter\": {\n \"type\": \"series\"\n },\n \"sort\": {\"year\": 1},\n \"limit\": 2\n}", 75 | "options": { 76 | "raw": { 77 | "language": "json" 78 | } 79 | } 80 | }, 81 | "url": { 82 | "raw": "{{baseUrl}}/data/v1/action/find", 83 | "host": [ 84 | "{{baseUrl}}" 85 | ], 86 | "path": [ 87 | "data", 88 | "v1", 89 | "action", 90 | "find" 91 | ] 92 | } 93 | }, 94 | "response": [] 95 | }, 96 | { 97 | "name": "aggregate", 98 | "request": { 99 | "method": "POST", 100 | "header": [], 101 | "body": { 102 | "mode": "raw", 103 | "raw": "{\n \"dataSource\": \"{{dataSource}}\",\n \"database\": \"{{database}}\",\n \"collection\": \"{{collection}}\",\n \"pipeline\": [\n {\n \"$group\": {\n \"_id\": \"$year\",\n \"count\": {\"$sum\": 1}\n }\n }, {\n \"$sort\": {\"count\": -1}\n }\n ]\n}", 104 | "options": { 105 | "raw": { 106 | "language": "json" 107 | } 108 | } 109 | }, 110 | "url": { 111 | "raw": 
"{{baseUrl}}/data/v1/action/aggregate", 112 | "host": [ 113 | "{{baseUrl}}" 114 | ], 115 | "path": [ 116 | "data", 117 | "v1", 118 | "action", 119 | "aggregate" 120 | ] 121 | } 122 | }, 123 | "response": [] 124 | } 125 | ], 126 | "event": [ 127 | { 128 | "listen": "prerequest", 129 | "script": { 130 | "type": "text/javascript", 131 | "exec": [ 132 | "" 133 | ] 134 | } 135 | }, 136 | { 137 | "listen": "test", 138 | "script": { 139 | "type": "text/javascript", 140 | "exec": [ 141 | "" 142 | ] 143 | } 144 | } 145 | ] 146 | } -------------------------------------------------------------------------------- /05.atlas-serverless/01.dataapi-function/postman/workspace.postman_globals.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "9d794c16-7a32-4185-8687-89445ad4a92d", 3 | "values": [ 4 | { 5 | "key": "appId", 6 | "value": "rest-qatia", 7 | "type": "default", 8 | "enabled": true 9 | }, 10 | { 11 | "key": "region", 12 | "value": "ap-southeast-1", 13 | "type": "default", 14 | "enabled": true 15 | }, 16 | { 17 | "key": "baseUrl", 18 | "value": "https://{{region}}.aws.data.mongodb-api.com/app/{{appId}}/endpoint", 19 | "type": "default", 20 | "enabled": true 21 | }, 22 | { 23 | "key": "dataSource", 24 | "value": "mongodb-atlas", 25 | "type": "default", 26 | "enabled": true 27 | }, 28 | { 29 | "key": "database", 30 | "value": "sample_mflix", 31 | "type": "default", 32 | "enabled": true 33 | }, 34 | { 35 | "key": "collection", 36 | "value": "movies", 37 | "type": "default", 38 | "enabled": true 39 | }, 40 | { 41 | "key": "apiKey", 42 | "value": "eTnER1A6H5NwqfCusaw16hDyqUDOnPsPk8HCZYgAQjAw2fma9s1n8tSBijrskpmQ", 43 | "type": "default", 44 | "enabled": true 45 | } 46 | ], 47 | "name": "Globals", 48 | "_postman_variable_scope": "globals", 49 | "_postman_exported_at": "2023-02-14T14:21:53.902Z", 50 | "_postman_exported_using": "Postman/10.10.1" 51 | } 
-------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Data Federation & Online Archive Hands-on 4 | 5 | ### [→ Data Federation](#data-federation) 6 | 7 | ### [→ Online Archive](#online-archive) 8 | 9 |
10 | 11 | # Data Federation 12 | 13 | - [Federation Instance 생성](#federation-instance-생성) 14 | - [Hot Storage 추가](#hot-storage-추가) 15 | - [Cold Storage 추가](#cold-storage-추가) 16 | - [Federated Query 시험](#federated-query-시험) 17 | 18 | > [ [Data Federation 공식 매뉴얼](https://www.mongodb.com/docs/atlas/data-federation/overview/) ] 19 | 20 | ## Federation Instance 생성 21 | 22 | ![01](img-fed/01.enter-atlasui.png) 23 | 24 | - [Atlas UI](https://cloud.mongodb.com) 로그인 후 왼쪽 `SERVICES` 메뉴의 `Data Federation` 으로 진입 25 | 26 | ![02](img-fed/02.enter-fed.png) 27 | 28 | - `Create Federated Database` 클릭 29 | 30 | ![03](img-fed/03.rename.png) 31 | 32 | - Instance 정보 변경 33 | - `FederatedDatabaseInstance0` 이름 변경 34 | > 예) `fdb0` 35 | - `Add Data Sources` 클릭 36 | 37 | ## Hot Storage 연결 38 | 39 | ![04](img-fed/04.cluster.png) 40 | 41 | - `Atlas Cluster` 선택 42 | - 운영중인 클러스터 선택 43 | - 클러스터 내의 database와 collection 선택 (예: `sample_mflix.movies`) 44 | > 샘플 데이터 준비는 [pre-work](https://github.com/MongoDBAtlas/MongoDBAtlasTraining/tree/main/00.pre-work#%EC%B4%88%EA%B8%B0-%EB%8D%B0%EC%9D%B4%ED%84%B0-%EB%A1%9C%EB%93%9C) 참조 45 | - `Next` 클릭 46 | 47 | ![05](img-fed/05.add-cluster.png) 48 | 49 | - 추가된 collection을 federated DB에 끌어서 연결 50 | - federated instance의 database, collection 정보 변경 51 | - `VirtualDatabase0` → `mflix` 52 | - `VirtualCollection0` → `movies` 53 | 54 | > Hot Storage를 federated DB에 연결 완료 55 | 56 | ## Cold Storage 연결 57 | 58 | ![06](img-fed/06.enable-s3.png) 59 | 60 | - 필터를 AWS S3로 선택 61 | - `Add Sample Data` 클릭 62 | 63 | ![07](img-fed/07.add-s3.png) 64 | 65 | - `/mflix/movies/{type string}/{year int}/` path를 끌어서 federated DB에 연결 66 | > 파일은 `movies` collection의 `type`과 `year`를 partition key로 사용해 컬럼 방식으로 저장됐음을 알수 있다. 67 | > 이러한 컬럼 파티션 저장 방식은 object storage 검색 성능을 향상시킬수 있으며 68 | > Data Lake 나 Online Archive 생성 시 파티션 키 선택에 적용할 수 있다. 
69 | - `Save` 클릭 70 | 71 | > Cold Storage를 federated DB에 연결 완료 72 | 73 | ## Federated Query 시험 74 | 75 | ### 준비 76 | 77 | > 연결한 두 스토리지는 모두 동일한 data set을 제공하고 있어 78 | > Federated Query의 동작확인을 위해 hot storage인 cluster내의 데이터를 변경 후 79 | > 검색을 하기 위한 준비이다. 그렇지 않으면 query는 중복 데이터를 반환한다. 80 | 81 | ![08](img-fed/08.enter-cluster.png) 82 | 83 | - `Database` 메뉴로 이동하여 `Browse Collections` 클릭 84 | 85 | ![09](img-fed/09.query-cluster.png) 86 | 87 | - `sample_mflix.movies`에서 `find({type: 'movie'}).sort({year: 1})`을 실행 후 결과 확인 88 | 89 | ![10](img-fed/10.update-doc.png) 90 | 91 | - 임의의 필드 값을 변경 후 `UPDATE` 클릭 92 | 93 | > 예) `rated` → `XXX` 94 | 95 | ### Query Federated DB 96 | 97 | ![11](img-fed/11.connect-fed.png) 98 | 99 | - `Data Federation` 메뉴로 이동하여 `Connect` 클릭 100 | 101 | ![11-2](img-fed/11.connect-fed-2.png) 102 | 103 | - 사용할 클라이언트 선택 후 connection string copy 104 | > 예) mongosh 105 | > 앱에서 사용 시 `` 변경 필요 106 | 107 | ![11-3](img-fed/11.connect-fed-3.png) 108 | 109 | - 연결 후 federation의 database와 collection 확인 110 | 111 | ``` 112 | > show dbs 113 | > use mflix 114 | > show collections 115 | ``` 116 | 117 | ![12](img-fed/12.query-fed.png) 118 | 119 | - 동일한 query를 사용해 2개의 document가 검색되는 것을 확인 120 | ``` 121 | db.movies.find({type: 'movie'}).sort({year: 1}).limit(2) 122 | ``` 123 | > cluster에서 수정한 `rated`필드의 값이 `XXX` 변경된 것과 124 | > S3에서 가져온 원본 데이터 `NOT RATED`를 갖는 동일한 document를 확인할 수 있다. 125 | > 두 document의 `_id`가 동일 126 | 127 | ### Query S3 Only 128 | 129 | ![13](img-fed/13.query-s3.png) 130 | 131 | - Federation에서 cluster를 제거 후, cold storage인 S3만 남겨두고 동일한 query 실행 132 | ``` 133 | db.movies.find({type: 'movie'}).sort({year: 1}).limit(2) 134 | ``` 135 | > cluster에 존재하는 document는 검색이 안되는 것을 확인할 수 있다. 136 | > S3에 존재하는 원본 `NOT RATED` document와 137 | > 다음 순서로 검색되는 새로운 document만 결과에서 확인할 수 있다. 
138 | 139 | # Online Archive 140 | 141 | - [Online Archive 생성](#online-archive-생성) 142 | - [아카이빙 완료 클러스터 검색](#아카이빙-후-클러스터-검색) 143 | - [Archive Federation Query](#archive-federation-query) 144 | 145 | **Note**: M10 이상 티어에서 제공 146 | 147 | > [ [Online Archive 공식 매뉴얼](https://www.mongodb.com/docs/atlas/online-archive/configure-online-archive/) ] 148 | 149 | ## Online Archive 생성 150 | 151 | ![01](img-ola/01.enter-ola.png) 152 | 153 | - `Database` 메뉴에서 `Browse Collections` 선택 후 `Online Archive` 탭 선택 154 | - `Configure Online Archive` 클릭 155 | - 이 후 보여주는 간단한 소개 페이지에서 `Next` 클릭 156 | 157 | ![02](img-ola/02.rule-date.png) 158 | 159 | - `Namespace`에 Federation 실습에서 사용했던 `sample_mflix.movies` 입력 160 | 161 | - `Date Match`는 document의 time field를 기준으로 아카이빙 여부를 결정한다 162 | 163 | - `Date Field`: 아카이빙 결정 기준으로 사용할 time field 164 | - `Age limit`: time field와 현재 시간을 비교하여 아카이빙에 포함시킬 기준 경과 시간 165 | - `Choose date format`: ISODate, Epoch sec, Epoch ms, Epoch ns 중 하나 166 | > 테스트에서는 `Custom Criteria`를 사용하기로 한다 167 | 168 | - `Deletion Age Limit`: 데이터 유지정책에 따라 데이터를 완전히 삭제할 아카이빙 후 보유 시간 169 | - `Schedule Archiving Window`: Online Archive의 기본 동작은 임의의 시간 주기적인 아카이빙이지만 cluster가 덜 바쁜 특정 시간대를 선택해 아카이빙을 실행할 수 있다 170 | 171 | ![02](img-ola/02.rule-query.png) 172 | 173 | - `Custom Criteria` 탭을 선택 후 174 | - 아카이빙에 포함할 document검색에 사용될 query 정의 175 | ``` 176 | { 177 | "type": {"$eq": "movie"} 178 | } 179 | ``` 180 | > `type` 값이 `movie` 인 document만 아카이빙 181 | - `Next` 클릭 182 | 183 | > 아카이빙의 기준이 될 필드는 반드시 인덱스가 정의돼 있어야 한다. 184 | > 필드에 인덱스가 중분치 않을 경우 `Index Sufficiency Warning` 프로젝트 에러가 발생한다. 
185 | > [ [가이드의 노트참조](https://www.mongodb.com/docs/atlas/online-archive/configure-online-archive/#create-an-archiving-rule-by-providing-the-following-information) ] 186 | 187 | ![03](img-ola/03.keys.png) 188 | 189 | - document를 object storage에 저장할 때 사용할 파티션 키 지정 190 | > Federation 실습에서 사용했던 S3 데이터와 동일하게 `type`과 `year` 사용 191 | > 192 | > partition key는 document를 파일로 저장할 때 tree구조 path의 노드로 사용되기 때문에 193 | > 검색에 사용할 query pattern에 맞춰 키와 순서를 정해야 검색 성능을 최적화할 수 있다. 194 | - `Next` 클릭 195 | 196 | ![04](img-ola/04.begin.png) 197 | 198 | - rule 리뷰 후 `Begin Archiving` 클릭 199 | - `Confirm` 클릭 200 | 201 | ![05](img-ola/05.done.png) 202 | 203 | - `Archive Last Updated`에서 최종 아카이빙 마지막 시간을 확인할 수 있다 204 | 205 | ## 아카이빙 후 클러스터 검색 206 | 207 | ![06](img-ola/06.query-cluster.png) 208 | 209 | - `Collections` 탭에서 아카이빙된 document 검색을 시도하면 `QUERY RESULTS: 0`을 확인할 수 있다 210 | 211 | ## Archive Federation Query 212 | 213 | ![07](img-ola/07.conn-ola.png) 214 | 215 | - `Online Archive`에서 `Connect` 클릭 후 216 | - 사용할 클라이언트 선택 (예: mongosh) 217 | 218 | ![07-2](img-ola/07.conn-ola-2.png) 219 | 220 | - `Connect to Cluster and Online Archive` 선택 후 URL 복사 221 | > client에서 사용 시 222 | > `myFirstDatabase`와 ``은 변경 필요 223 | 224 | ![08](img-ola/08.query-ola.png) 225 | 226 | - Federation query 실행 시 cluster에서 제거된(migrate to archive) document가 검색된 것을 확인할 수 있다 227 | -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/01.enter-atlasui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/01.enter-atlasui.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/02.enter-fed.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/02.enter-fed.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/03.rename.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/03.rename.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/04.cluster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/04.cluster.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/05.add-cluster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/05.add-cluster.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/06.enable-s3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/06.enable-s3.png -------------------------------------------------------------------------------- 
/05.atlas-serverless/03.federation-onlinearchive/img-fed/07.add-s3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/07.add-s3.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/08.enter-cluster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/08.enter-cluster.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/09.query-cluster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/09.query-cluster.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/10.update-doc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/10.update-doc.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/11.connect-fed-2.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/11.connect-fed-2.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/11.connect-fed-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/11.connect-fed-3.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/11.connect-fed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/11.connect-fed.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/12.query-fed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/12.query-fed.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-fed/13.query-s3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-fed/13.query-s3.png -------------------------------------------------------------------------------- 
/05.atlas-serverless/03.federation-onlinearchive/img-ola/01.enter-ola.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/01.enter-ola.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/02.rule-date.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/02.rule-date.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/02.rule-query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/02.rule-query.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/03.keys.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/03.keys.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/04.begin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/04.begin.png 
-------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/05.done.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/05.done.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/06.query-cluster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/06.query-cluster.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/07.conn-ola-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/07.conn-ola-2.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/07.conn-ola.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/07.conn-ola.png -------------------------------------------------------------------------------- /05.atlas-serverless/03.federation-onlinearchive/img-ola/08.query-ola.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/MongoDBAtlas/MongoDBAtlasTraining/dc2ef5ce04417174caeea0f8eccfa45492131d99/05.atlas-serverless/03.federation-onlinearchive/img-ola/08.query-ola.png -------------------------------------------------------------------------------- /05.atlas-serverless/README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # MongoDB Atlas Serverless Hands-on 4 | 5 | - [Data API & Function](01.dataapi-function/README.md) 6 | - [Data Federation & Online Archive](03.federation-onlinearchive/README.md) 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | # MongoDB Atlas Training 4 | 5 | __Agenda__ 6 | 7 | 8 | 0. __PreWork__ 9 | - [GitHub][0] 10 | 11 | 1. __Atlas__ 12 | - MongoDB & Document Model 13 | - MQL 14 |   15 | - __Hands-on__ [GitHub][1] 16 | - Provision database 17 | - CRUD 18 |   19 | 20 | - Break 21 | 22 | 23 | 2. __Document Modeling__ 24 | - Modeling 25 | 26 |    27 | 3. __Index & Aggregation__ 28 | - Index 29 | - Explain (ESR & Tips) 30 | - Monitoring 31 | - Aggregation 32 | 33 |   34 | - __Hands-on__ [GitHub][3] 35 | - Index & Explain 36 | 37 | 38 | - Break 39 | 40 | 41 | 4. __Atlas Search__ 42 | - Full text search & index 43 | - Search functions 44 |   45 | 46 | - __Hands-on__ [GitHub][4] 47 | - Search 48 | 49 |   50 | - Break 51 | 52 | 53 | 5. __Atlas Serverless services__ 54 | - Functions & Triggers 55 | - Data API & GraphQL 56 | - Data Federation & Online Archive 57 | 58 |   59 | - __Hands-on__ [GitHub][5] 60 | - Data API 61 | - Functions 62 | 63 | 64 | [0]: https://github.com/MongoDBAtlas/MongoDBAtlasTraining/tree/main/00.pre-work 65 | 66 | [1]: https://github.com/MongoDBAtlas/MongoDBAtlasTraining/tree/main/01.Provision%20and%20CRUD 67 | 68 | 69 | [3]: https://github.com/MongoDBAtlas/MongoDBAtlasTraining/tree/main/03.index%20and%20aggregation 70 | 71 | 72 | [4]: https://github.com/MongoDBAtlas/MongoDBAtlasTraining/tree/main/04.atlas-search 73 | 74 | [5]: https://github.com/MongoDBAtlas/MongoDBAtlasTraining/tree/main/05.atlas-serverless --------------------------------------------------------------------------------