├── map.jpg
├── Integrate with Machine Learning APIs: Challenge Lab
├── Deploy a Compute Instance with a Remote Startup Script
├── Scale Out and Update a Containerized Application on a Kubernetes Cluster
├── Build and Deploy a Docker Image to a Kubernetes Cluster f
├── qwiklabs.yaml
├── Exploring the Public Cryptocurrency Datasets Available in BigQuery
├── Build and Secure Networks in Google Cloud: Challenge Lab
├── Migrate a MySQL Database to Google Cloud SQL
├── Deploy and Manage Cloud Environments with Google Cloud: Challenge Lab
├── qwiklabs.jinja
├── Configure a Firewall and a Startup Script with Deployment Manager lab
├── Engineer Data in Google Cloud :Challenge Lab | Qwiklabs | Google Cloud Platform
├── Deploy to Kubernetes in Google Cloud:challenge lab
├── commands
├── Ensure Access & Identity in Google Cloud: Challenge Lab
├── Build a Website on Google Cloud: Challenge Lab
├── Set up and Configure a Cloud Environment in Google Cloud
├── Perform Foundational Data, ML, and AI Tasks in Google Cloud: Challenge Lab
├── Build and Optimize Data Warehouses with BigQuery: Challenge Lab
├── Getting Started: Create and Manage Cloud Resources: Challenge
├── Create ML Models with BigQuery ML: Challenge Lab
├── analyze-images.py
└── Insights from Data with BigQuery: Challenge Lab(Updated task 9)
/map.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rahullrajesh/implement-devops/HEAD/map.jpg
--------------------------------------------------------------------------------
/Integrate with Machine Learning APIs: Challenge Lab:
--------------------------------------------------------------------------------
#WATCH FULL LAB ON : https://www.youtube.com/watch?v=ZZBt6geViXc

# The script takes [PROJECT_NAME] [BUCKET_NAME]; in this lab the bucket is
# named after the project, so the project ID is passed twice.
python3 analyze-images.py $DEVSHELL_PROJECT_ID $DEVSHELL_PROJECT_ID

SELECT locale, COUNT(locale) AS lcount
FROM image_classification_dataset.image_text_detail
GROUP BY locale
ORDER BY lcount DESC
--------------------------------------------------------------------------------
/Deploy a Compute Instance with a Remote Startup Script:
--------------------------------------------------------------------------------
#WATCH FULL LAB : https://www.youtube.com/watch?v=MZZoiEIDOr8

# Substitute a bucket name of your choice for <BUCKET_NAME> below.
gsutil mb gs://<BUCKET_NAME>

gsutil cp resources-install-web.sh gs://<BUCKET_NAME>

gcloud compute instances create example-instance --zone us-central1-a --tags http-server \
  --metadata startup-script-url=gs://<BUCKET_NAME>/resources-install-web.sh

gcloud compute firewall-rules create allow-http --target-tags http-server --source-ranges 0.0.0.0/0 --allow tcp:80
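
# Optional check (not a graded step): fetch the instance's external IP and
# confirm the startup script installed a working web server. Assumes the
# instance name and zone used above.
EXTERNAL_IP=$(gcloud compute instances describe example-instance --zone us-central1-a \
  --format='get(networkInterfaces[0].accessConfigs[0].natIP)')
curl http://$EXTERNAL_IP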
--------------------------------------------------------------------------------
/Scale Out and Update a Containerized Application on a Kubernetes Cluster:
--------------------------------------------------------------------------------
#WATCH FULL LAB ON : https://www.youtube.com/watch?v=aiSfeGGWvKY

gsutil cp gs://sureskills-ql/challenge-labs/ch05-k8s-scale-and-update/echo-web-v2.tar.gz .

tar xvzf echo-web-v2.tar.gz

gcloud builds submit --tag gcr.io/$DEVSHELL_PROJECT_ID/echo-app:v2 .

gcloud container clusters get-credentials echo-cluster --zone us-central1-a

kubectl create deployment echo-web --image=gcr.io/qwiklabs-resources/echo-app:v1

kubectl expose deployment echo-web --type=LoadBalancer --port 80 --target-port 8000

# In the editor, update the container image to the v2 image built above
# (gcr.io/<PROJECT_ID>/echo-app:v2), then save and exit.
kubectl edit deploy echo-web

kubectl scale deploy echo-web --replicas=2
--------------------------------------------------------------------------------
/Build and Deploy a Docker Image to a Kubernetes Cluster f:
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON :https://www.youtube.com/watch?v=8WMp4K_LVJY

# The archive is in the lab bucket or in your project's bucket; whichever
# of these two copies succeeds is fine.
gsutil cp gs://sureskills-ql/challenge-labs/ch04-kubernetes-app-deployment/echo-web.tar.gz .

gsutil cp gs://$DEVSHELL_PROJECT_ID/echo-web.tar.gz .

tar -xvf echo-web.tar.gz

gcloud builds submit --tag gcr.io/$DEVSHELL_PROJECT_ID/echo-app:v1 .

gcloud container clusters create echo-cluster --num-nodes 2 --zone us-central1-a --machine-type n1-standard-2

kubectl create deployment echo-web --image=gcr.io/qwiklabs-resources/echo-app:v1

kubectl expose deployment echo-web --type=LoadBalancer --port=80 --target-port=8000

kubectl get svc
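
# Optional check: once the LoadBalancer has an external IP, the app should
# answer on port 80. Assumes the echo-web service created above.
EXTERNAL_IP=$(kubectl get svc echo-web -o jsonpath='{.status.loadBalancer.ingress[0].ip}')
curl http://$EXTERNAL_IP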
--------------------------------------------------------------------------------
/qwiklabs.yaml:
--------------------------------------------------------------------------------
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

imports:
- path: qwiklabs.jinja

resources:
- name: qwiklabs
  type: qwiklabs.jinja
  properties:
    zone: us-central1-a
--------------------------------------------------------------------------------
/Exploring the Public Cryptocurrency Datasets Available in BigQuery:
--------------------------------------------------------------------------------
CREATE OR REPLACE TABLE lab.51 (transaction_hash STRING) AS
SELECT transaction_id
FROM `bigquery-public-data.bitcoin_blockchain.transactions`, UNNEST(outputs) AS outputs
WHERE outputs.output_satoshis = 19499300000000

CREATE OR REPLACE TABLE lab.52 (balance NUMERIC) AS
WITH double_entry_book AS (
  -- debits
  SELECT
    ARRAY_TO_STRING(inputs.addresses, ",") AS address,
    -inputs.value AS value
  FROM `bigquery-public-data.crypto_bitcoin.inputs` AS inputs
  UNION ALL
  -- credits
  SELECT
    ARRAY_TO_STRING(outputs.addresses, ",") AS address,
    outputs.value AS value
  FROM `bigquery-public-data.crypto_bitcoin.outputs` AS outputs
)
SELECT
  SUM(value) AS balance
FROM double_entry_book
WHERE address = "1XPTgDRhN8RFnzniWCddobD9iKZatrvH4"
--------------------------------------------------------------------------------
/Build and Secure Networks in Google Cloud: Challenge Lab:
--------------------------------------------------------------------------------
#WATCH FULL LAB ON : https://www.youtube.com/watch?v=22TczCBXyys

Step 1: gcloud compute firewall-rules delete open-access

Step 2: gcloud compute instances start bastion

Step 3: gcloud compute firewall-rules create ssh-ingress --allow=tcp:22 --source-ranges 35.235.240.0/20 --target-tags ssh-ingress --network acme-vpc
        gcloud compute instances add-tags bastion --tags=ssh-ingress --zone=us-central1-b

Step 4: gcloud compute firewall-rules create http-ingress --allow=tcp:80 --source-ranges 0.0.0.0/0 --target-tags http-ingress --network acme-vpc

Step 5: gcloud compute instances add-tags juice-shop --tags=http-ingress --zone=us-central1-b

Step 6: gcloud compute firewall-rules create internal-ssh-ingress --allow=tcp:22 --source-ranges 192.168.10.0/24 --target-tags internal-ssh-ingress --network acme-vpc

Step 7: gcloud compute instances add-tags juice-shop --tags=internal-ssh-ingress --zone=us-central1-b
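
# Optional check: the three new ingress rules should now exist on acme-vpc
# and open-access should be gone.
gcloud compute firewall-rules list --filter="network:acme-vpc"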
--------------------------------------------------------------------------------
/Migrate a MySQL Database to Google Cloud SQL:
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON : https://www.youtube.com/watch?v=UMu6HChisU0

export ZONE=us-central1-a

gcloud sql instances create wordpress --tier=db-n1-standard-1 --activation-policy=ALWAYS --gce-zone $ZONE

gcloud sql users set-password --host % root --instance wordpress --password Password1*

# Set this to the external IP address of the blog VM, with a /32 suffix.
export ADDRESS=<BLOG_VM_EXTERNAL_IP>/32

gcloud sql instances patch wordpress --authorized-networks $ADDRESS --quiet

gcloud compute ssh blog --zone=us-central1-a

MYSQLIP=$(gcloud sql instances describe wordpress --format="value(ipAddresses.ipAddress)")

mysql --host=$MYSQLIP --user=root --password

CREATE DATABASE wordpress;
CREATE USER 'blogadmin'@'%' IDENTIFIED BY 'Password1*';
GRANT ALL PRIVILEGES ON wordpress.* TO 'blogadmin'@'%';
FLUSH PRIVILEGES;

sudo mysqldump -u root -pPassword1* wordpress > wordpress_backup.sql

mysql --host=$MYSQLIP --user=root -pPassword1* --verbose wordpress < wordpress_backup.sql

sudo service apache2 restart

cd /var/www/html/wordpress

# Point DB_HOST in wp-config.php at the Cloud SQL instance IP ($MYSQLIP).
sudo nano wp-config.php
--------------------------------------------------------------------------------
/Deploy and Manage Cloud Environments with Google Cloud: Challenge Lab:
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON : https://www.youtube.com/watch?v=gSVviDjfTEg

cd /work/dm

sed -i s/SET_REGION/us-east1/g prod-network.yaml

gcloud deployment-manager deployments create prod-network --config=prod-network.yaml

gcloud config set compute/zone us-east1-b

gcloud container clusters create kraken-prod \
  --num-nodes 2 \
  --network kraken-prod-vpc \
  --subnetwork kraken-prod-subnet

gcloud container clusters get-credentials kraken-prod

cd /work/k8s

for F in $(ls *.yaml); do kubectl create -f $F; done

gcloud config set compute/zone us-east1-b

gcloud compute instances create kraken-admin --network-interface="subnet=kraken-mgmt-subnet" --network-interface="subnet=kraken-prod-subnet"

gcloud config set compute/zone us-east1-b

gcloud container clusters get-credentials spinnaker-tutorial

DECK_POD=$(kubectl get pods --namespace default -l "cluster=spin-deck" -o jsonpath="{.items[0].metadata.name}")

kubectl port-forward --namespace default $DECK_POD 8080:9000 >> /dev/null &

gcloud config set compute/zone us-east1-b

gcloud source repos clone sample-app

cd sample-app

touch a

git config --global user.email "$(gcloud config get-value account)"

git config --global user.name "Student"

# The new file must be staged or `git commit -a` has nothing to commit.
git add a

git commit -a -m "change"

git tag v1.0.1

git push --tags
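
# Optional check (run right after the `kubectl create -f` loop above, while
# kubectl still points at kraken-prod): the workloads should come up.
kubectl get pods
kubectl get services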
--------------------------------------------------------------------------------
/qwiklabs.jinja:
--------------------------------------------------------------------------------
resources:
- type: compute.v1.instance
  name: vm-test
  properties:
    zone: {{ properties["zone"] }}
    machineType: https://www.googleapis.com/compute/v1/projects/{{ env["project"] }}/zones/{{ properties["zone"] }}/machineTypes/f1-micro
    # For examples on how to use startup scripts on an instance, see:
    #   https://cloud.google.com/compute/docs/startupscript
    disks:
    - deviceName: boot
      type: PERSISTENT
      boot: true
      autoDelete: true
      initializeParams:
        diskName: disk-{{ env["deployment"] }}
        sourceImage: https://www.googleapis.com/compute/v1/projects/debian-cloud/global/images/family/debian-9
    networkInterfaces:
    - network: https://www.googleapis.com/compute/v1/projects/{{ env["project"] }}/global/networks/default
      # Access Config required to give the instance a public IP address
      accessConfigs:
      - name: External NAT
        type: ONE_TO_ONE_NAT
    tags:
      items:
      - http
    metadata:
      items:
      - key: startup-script
        value: |
          #!/bin/bash
          apt-get update
          apt-get install -y apache2
- type: compute.v1.firewall
  name: default-allow-http
  properties:
    network: https://www.googleapis.com/compute/v1/projects/{{ env["project"] }}/global/networks/default
    targetTags:
    - http
    allowed:
    - IPProtocol: tcp
      ports:
      - '80'
    sourceRanges:
    - 0.0.0.0/0
--------------------------------------------------------------------------------
/Configure a Firewall and a Startup Script with Deployment Manager lab:
--------------------------------------------------------------------------------
#WATCH FULL LAB : https://www.youtube.com/watch?v=LPfGE5BgZP8

mkdir deployment_manager
cd deployment_manager
gsutil cp gs://spls/gsp302/* .
gcloud deployment-manager deployments create test --config=qwiklabs.yaml

# Copy the content from Github's qwiklabs.yaml
# https://github.com/rahullrajesh/implement-devops/blob/master/qwiklabs.yaml

# Edit qwiklabs.yaml
nano qwiklabs.yaml

# Use CTRL + Shift + 6 to mark the beginning of your block
# Move cursor with arrow keys to end of your block, the text will be highlighted
# Use CTRL + K to cut/delete block
# Save file, use CTRL + X, type Y, Enter

# Copy the content from Github's qwiklabs.jinja
# https://github.com/rahullrajesh/implement-devops/blob/master/qwiklabs.jinja

# Edit qwiklabs.jinja
nano qwiklabs.jinja

# Use CTRL + Shift + 6 to mark the beginning of your block
# Move cursor with arrow keys to end of your block, the text will be highlighted
# Use CTRL + K to cut/delete block
# Save file, use CTRL + X, type Y, Enter

gcloud deployment-manager deployments delete test

gcloud deployment-manager deployments create test --config=qwiklabs.yaml

# Wait until the deployment completes
# Check the newly created vm instance's external IP address

gcloud compute instances list

# Copy the EXTERNAL_IP of the 'vm-test' instance
# Paste the IP into a new browser tab
# Make sure you can see the Apache welcome page
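
# Alternative to the nano copy/paste above: fetch both files directly (raw
# URLs assumed from the blob links above).
curl -LO https://raw.githubusercontent.com/rahullrajesh/implement-devops/master/qwiklabs.yaml
curl -LO https://raw.githubusercontent.com/rahullrajesh/implement-devops/master/qwiklabs.jinja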
--------------------------------------------------------------------------------
/Engineer Data in Google Cloud :Challenge Lab | Qwiklabs | Google Cloud Platform:
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON : https://www.youtube.com/watch?v=gTxcnFIc0Dc

CREATE OR REPLACE TABLE
  taxirides.taxi_training_data AS
SELECT
  (tolls_amount + fare_amount) AS fare_amount,
  pickup_datetime,
  pickup_longitude AS pickuplon,
  pickup_latitude AS pickuplat,
  dropoff_longitude AS dropofflon,
  dropoff_latitude AS dropofflat,
  passenger_count AS passengers
FROM
  taxirides.historical_taxi_rides_raw
WHERE
  RAND() < 0.001
  AND trip_distance > 0
  AND fare_amount >= 2.5
  AND pickup_longitude > -78
  AND pickup_longitude < -70
  AND dropoff_longitude > -78
  AND dropoff_longitude < -70
  AND pickup_latitude > 37
  AND pickup_latitude < 45
  AND dropoff_latitude > 37
  AND dropoff_latitude < 45
  AND passenger_count > 0

CREATE OR REPLACE MODEL taxirides.fare_model
TRANSFORM(
  * EXCEPT(pickup_datetime),
  ST_Distance(ST_GeogPoint(pickuplon, pickuplat), ST_GeogPoint(dropofflon, dropofflat)) AS euclidean,
  CAST(EXTRACT(DAYOFWEEK FROM pickup_datetime) AS STRING) AS dayofweek,
  CAST(EXTRACT(HOUR FROM pickup_datetime) AS STRING) AS hourofday
)
OPTIONS(input_label_cols=['fare_amount'], model_type='linear_reg')
AS
SELECT * FROM taxirides.taxi_training_data

CREATE OR REPLACE TABLE taxirides.2015_fare_amount_predictions AS
SELECT * FROM ML.PREDICT(MODEL taxirides.fare_model, (
  SELECT * FROM taxirides.report_prediction_data)
)
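
# Optional check before predicting: the lab scores the model on RMSE, which
# you can read back with ML.EVALUATE from Cloud Shell.
bq query --use_legacy_sql=false \
  'SELECT SQRT(mean_squared_error) AS rmse FROM ML.EVALUATE(MODEL `taxirides.fare_model`)'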
--------------------------------------------------------------------------------
/Deploy to Kubernetes in Google Cloud:challenge lab:
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON : https://www.youtube.com/watch?v=4cRGGbxqymk

gsutil cat gs://cloud-training/gsp318/marking/setup_marking.sh | bash
gcloud source repos clone valkyrie-app
cd valkyrie-app
cat > Dockerfile <<EOF
# (Dockerfile contents not preserved in this dump)
EOF

# (intermediate build/deploy steps not preserved in this dump)

kubectl port-forward $POD_NAME 8080:8080 >> /dev/null &
printf $(kubectl get secret cd-jenkins -o jsonpath="{.data.jenkins-admin-password}" | base64 --decode);echo

sed -i "s/green/orange/g" source/html.go
# Update project in Jenkinsfile
sed -i "s/YOUR_PROJECT/$GOOGLE_CLOUD_PROJECT/g" Jenkinsfile
git config --global user.email "you@example.com"
git config --global user.name "student"
git add .
--------------------------------------------------------------------------------
/commands:
--------------------------------------------------------------------------------
gcloud config set compute/zone us-east1-b
git clone https://source.developers.google.com/p/$DEVSHELL_PROJECT_ID/r/sample-app
gcloud container clusters get-credentials jenkins-cd
kubectl create clusterrolebinding cluster-admin-binding --clusterrole=cluster-admin --user=$(gcloud config get-value account)
export POD_NAME=$(kubectl get pods --namespace default -l "app.kubernetes.io/component=jenkins-master" -l "app.kubernetes.io/instance=cd" -o jsonpath="{.items[0].metadata.name}")
kubectl port-forward $POD_NAME 8080:8080 >> /dev/null &
printf $(kubectl get secret cd-jenkins -o jsonpath="{.data.jenkins-admin-password}" | base64 --decode);echo
cd sample-app
kubectl create ns production
kubectl apply -f k8s/production -n production
kubectl apply -f k8s/canary -n production
kubectl apply -f k8s/services -n production
kubectl get svc
kubectl get service gceme-frontend -n production
git init
git config credential.helper gcloud.sh
git remote add origin https://source.developers.google.com/p/$DEVSHELL_PROJECT_ID/r/sample-app
git config --global user.email "[EMAIL_ADDRESS]"
git config --global user.name "[USERNAME]"
git checkout -b new-feature
vi html.go
vi main.go
git add Jenkinsfile html.go main.go
git commit -m "Version 2.0.0"
git push origin new-feature
kubectl proxy &
curl \
  http://localhost:8001/api/v1/namespaces/new-feature/services/gceme-frontend:80/proxy/version
kubectl get service gceme-frontend -n production
git checkout -b canary
git push origin canary
export FRONTEND_SERVICE_IP=$(kubectl get -o \
  jsonpath="{.status.loadBalancer.ingress[0].ip}" --namespace=production services gceme-frontend)
while true; do curl http://$FRONTEND_SERVICE_IP/version; sleep 1; done

# Switch back to master before merging the canary branch in.
git checkout master
git merge canary

git push origin master
export FRONTEND_SERVICE_IP=$(kubectl get -o \
  jsonpath="{.status.loadBalancer.ingress[0].ip}" --namespace=production services gceme-frontend)
while true; do curl http://$FRONTEND_SERVICE_IP/version; sleep 1; done
kubectl get service gceme-frontend -n production
--------------------------------------------------------------------------------
/Ensure Access & Identity in Google Cloud: Challenge Lab:
--------------------------------------------------------------------------------
gcloud config set compute/zone us-east1-b

nano role-definition.yaml

# role-definition.yaml:
title: "Edirca Storage Update"
description: "Add and update objects in Google Cloud Storage buckets"
includedPermissions:
- storage.buckets.get
- storage.objects.get
- storage.objects.list
- storage.objects.update
- storage.objects.create

gcloud iam roles create orca_storage_update \
  --project $DEVSHELL_PROJECT_ID \
  --file role-definition.yaml

gcloud iam service-accounts create orca-private-cluster-sa \
  --display-name "Orca Private Cluster Service Account"

gcloud projects add-iam-policy-binding $DEVSHELL_PROJECT_ID \
  --member serviceAccount:orca-private-cluster-sa@$DEVSHELL_PROJECT_ID.iam.gserviceaccount.com --role roles/monitoring.viewer

gcloud projects add-iam-policy-binding $DEVSHELL_PROJECT_ID \
  --member serviceAccount:orca-private-cluster-sa@$DEVSHELL_PROJECT_ID.iam.gserviceaccount.com --role roles/monitoring.metricWriter

gcloud projects add-iam-policy-binding $DEVSHELL_PROJECT_ID \
  --member serviceAccount:orca-private-cluster-sa@$DEVSHELL_PROJECT_ID.iam.gserviceaccount.com --role roles/logging.logWriter

gcloud projects add-iam-policy-binding $DEVSHELL_PROJECT_ID \
  --member serviceAccount:orca-private-cluster-sa@$DEVSHELL_PROJECT_ID.iam.gserviceaccount.com --role projects/$DEVSHELL_PROJECT_ID/roles/orca_storage_update

gcloud container clusters create orca-test-cluster --network orca-build-vpc --subnetwork orca-build-subnet --service-account orca-private-cluster-sa@$DEVSHELL_PROJECT_ID.iam.gserviceaccount.com --enable-master-authorized-networks --master-authorized-networks 192.168.10.2/32 --enable-ip-alias --enable-private-nodes --master-ipv4-cidr 10.142.0.0/28 --enable-private-endpoint

gcloud container clusters get-credentials orca-test-cluster --internal-ip --zone=us-east1-b
kubectl create deployment hello-server --image=gcr.io/google-samples/hello-app:1.0
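
# Optional check: the deployment should report 1/1 ready inside the private
# cluster.
kubectl get deployment hello-server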
--------------------------------------------------------------------------------
/Build a Website on Google Cloud: Challenge Lab:
--------------------------------------------------------------------------------
#VIEW FULL LAB VIDEO : https://www.youtube.com/watch?v=6iXU2bKaaMw

git clone https://github.com/googlecodelabs/monolith-to-microservices.git
cd ~/monolith-to-microservices
./setup.sh

cd ~/monolith-to-microservices/monolith

gcloud services enable cloudbuild.googleapis.com

gcloud builds submit --tag gcr.io/${GOOGLE_CLOUD_PROJECT}/fancytest:1.0.0 .

gcloud services enable container.googleapis.com

gcloud container clusters create fancy-cluster --num-nodes 3 --zone us-central1-a --machine-type n1-standard-1

kubectl create deployment fancytest --image=gcr.io/${GOOGLE_CLOUD_PROJECT}/fancytest:1.0.0

kubectl expose deployment fancytest --name=fancytest --type=LoadBalancer --port=80 --target-port=8080

cd ~/monolith-to-microservices/microservices/src/orders
gcloud builds submit --tag gcr.io/${GOOGLE_CLOUD_PROJECT}/orders:1.0.0 .

cd ~/monolith-to-microservices/microservices/src/products
gcloud builds submit --tag gcr.io/${GOOGLE_CLOUD_PROJECT}/products:1.0.0 .

cd ~/monolith-to-microservices/microservices/src/orders
kubectl create deployment orders --image=gcr.io/${GOOGLE_CLOUD_PROJECT}/orders:1.0.0

kubectl expose deployment orders --type=LoadBalancer --port 80 --target-port 8081

cd ~/monolith-to-microservices/microservices/src/products
kubectl create deployment products --image=gcr.io/${GOOGLE_CLOUD_PROJECT}/products:1.0.0

kubectl expose deployment products --type=LoadBalancer --port 80 --target-port 8082

kubectl get services

cd ~/monolith-to-microservices/react-app
nano .env

# Replace the localhost URLs ...
REACT_APP_ORDERS_URL=http://localhost:8081/api/orders
REACT_APP_PRODUCTS_URL=http://localhost:8082/api/products

# ... with the LoadBalancer external IPs from `kubectl get services`:
REACT_APP_ORDERS_URL=http://<ORDERS_EXTERNAL_IP>/api/orders
REACT_APP_PRODUCTS_URL=http://<PRODUCTS_EXTERNAL_IP>/api/products

cd ~/monolith-to-microservices/microservices/src/frontend
gcloud builds submit --tag gcr.io/${GOOGLE_CLOUD_PROJECT}/frontend:1.0.0 .

kubectl create deployment frontend --image=gcr.io/${GOOGLE_CLOUD_PROJECT}/frontend:1.0.0

kubectl expose deployment frontend --type=LoadBalancer --port 80 --target-port 8080
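
# Optional check: wait for the frontend service's external IP, then open it
# in a browser to confirm the site loads.
kubectl get svc frontend -w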
--------------------------------------------------------------------------------
/Set up and Configure a Cloud Environment in Google Cloud:
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON : https://www.youtube.com/watch?v=M4iWdIqZRm8

gcloud compute networks create griffin-dev-vpc --subnet-mode custom

gcloud compute networks subnets create griffin-dev-wp --network=griffin-dev-vpc --region us-east1 --range=192.168.16.0/20

gcloud compute networks subnets create griffin-dev-mgmt --network=griffin-dev-vpc --region us-east1 --range=192.168.32.0/20

gsutil cp -r gs://cloud-training/gsp321/dm .

cd dm

sed -i s/SET_REGION/us-east1/g prod-network.yaml

gcloud deployment-manager deployments create prod-network \
  --config=prod-network.yaml

cd ..

gcloud compute instances create bastion --network-interface=network=griffin-dev-vpc,subnet=griffin-dev-mgmt --network-interface=network=griffin-prod-vpc,subnet=griffin-prod-mgmt --tags=ssh --zone=us-east1-b

gcloud compute firewall-rules create fw-ssh-dev --source-ranges=0.0.0.0/0 --target-tags ssh --allow=tcp:22 --network=griffin-dev-vpc

gcloud compute firewall-rules create fw-ssh-prod --source-ranges=0.0.0.0/0 --target-tags ssh --allow=tcp:22 --network=griffin-prod-vpc

gcloud sql instances create griffin-dev-db --root-password password --region=us-east1

gcloud sql connect griffin-dev-db

# Cut and paste the SQL

CREATE DATABASE wordpress;
GRANT ALL PRIVILEGES ON wordpress.* TO "wp_user"@"%" IDENTIFIED BY "stormwind_rules";
FLUSH PRIVILEGES;

gcloud container clusters create griffin-dev \
  --network griffin-dev-vpc \
  --subnetwork griffin-dev-wp \
  --machine-type n1-standard-4 \
  --num-nodes 2 \
  --zone us-east1-b

gcloud container clusters get-credentials griffin-dev --zone us-east1-b

cd ~/

gsutil cp -r gs://cloud-training/gsp321/wp-k8s .

cd wp-k8s

sed -i s/username_goes_here/wp_user/g wp-env.yaml

sed -i s/password_goes_here/stormwind_rules/g wp-env.yaml

kubectl create -f wp-env.yaml

gcloud iam service-accounts keys create key.json --iam-account=cloud-sql-proxy@$GOOGLE_CLOUD_PROJECT.iam.gserviceaccount.com

kubectl create secret generic cloudsql-instance-credentials --from-file key.json

I=$(gcloud sql instances describe griffin-dev-db --format="value(connectionName)")

sed -i s/YOUR_SQL_INSTANCE/$I/g wp-deployment.yaml

kubectl create -f wp-deployment.yaml

kubectl create -f wp-service.yaml
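
# Optional check: watch for the WordPress LoadBalancer IP (service name
# assumed from wp-service.yaml), then browse to it.
kubectl get svc wordpress -w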
--------------------------------------------------------------------------------
/Perform Foundational Data, ML, and AI Tasks in Google Cloud: Challenge Lab:
--------------------------------------------------------------------------------
# VIEW FULL LAB: https://www.youtube.com/watch?v=LfWcNrkMwQs
# TO GET LAB SCHEMA:

gsutil cp gs://cloud-training/gsp323/lab.csv .

cat lab.csv

gsutil cp gs://cloud-training/gsp323/lab.schema .

cat lab.schema

# TASK 4 - PART 1 - CLOUD NATURAL LANGUAGE:

gcloud iam service-accounts create my-natlang-sa \
  --display-name "my natural language service account"

gcloud iam service-accounts keys create ~/key.json \
  --iam-account my-natlang-sa@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com

export GOOGLE_APPLICATION_CREDENTIALS="/home/$USER/key.json"

gcloud auth activate-service-account my-natlang-sa@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com --key-file=$GOOGLE_APPLICATION_CREDENTIALS

gcloud ml language analyze-entities --content="Old Norse texts portray Odin as one-eyed and long-bearded, frequently wielding a spear named Gungnir and wearing a cloak and a broad hat." > result.json

gcloud auth login
# (copy the token from the link provided)

gsutil cp result.json gs://YOUR_PROJECT-marking/task4-cnl.result

# TASK 4 - PART 2 - CLOUD SPEECH:
# CREATE AN API KEY AND EXPORT IT AS THE "API_KEY" VARIABLE IN CLOUD SHELL,
# THEN CREATE THE FOLLOWING JSON

nano request.json

{
  "config": {
    "encoding": "FLAC",
    "languageCode": "en-US"
  },
  "audio": {
    "uri": "gs://cloud-training/gsp323/task4.flac"
  }
}

curl -s -X POST -H "Content-Type: application/json" --data-binary @request.json \
  "https://speech.googleapis.com/v1/speech:recognize?key=${API_KEY}" > result.json

gsutil cp result.json gs://YOUR_PROJECT-marking/task4-gcs.result

# TASK 4 - PART 3 - VIDEO INTELLIGENCE:

gcloud iam service-accounts create quickstart

gcloud iam service-accounts keys create key.json --iam-account quickstart@${GOOGLE_CLOUD_PROJECT}.iam.gserviceaccount.com

gcloud auth activate-service-account --key-file key.json

export ACCESS_TOKEN=$(gcloud auth print-access-token)

nano request.json

{
  "inputUri": "gs://spls/gsp154/video/chicago.mp4",
  "features": [
    "TEXT_DETECTION"
  ]
}

curl -s -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  'https://videointelligence.googleapis.com/v1/videos:annotate' \
  -d @request.json

curl -s -H 'Content-Type: application/json' -H "Authorization: Bearer $ACCESS_TOKEN" 'https://videointelligence.googleapis.com/v1/operations/OPERATION_FROM_PREVIOUS_REQUEST' > result1.json

gsutil cp result1.json gs://YOUR_PROJECT-marking/task4-gvi.result
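
# Optional: capture the operation name from the annotate response instead of
# pasting it by hand (grep pattern assumed), then poll it.
OPERATION=$(curl -s -H 'Content-Type: application/json' \
  -H "Authorization: Bearer $ACCESS_TOKEN" \
  'https://videointelligence.googleapis.com/v1/videos:annotate' \
  -d @request.json | grep -oP '(?<="name": ")[^"]+')
curl -s -H "Authorization: Bearer $ACCESS_TOKEN" \
  "https://videointelligence.googleapis.com/v1/$OPERATION" > result1.json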
--------------------------------------------------------------------------------
/Build and Optimize Data Warehouses with BigQuery: Challenge Lab:
--------------------------------------------------------------------------------
-- Substitute your own names for [DATASET].[TABLE] throughout.

CREATE OR REPLACE TABLE [DATASET].[TABLE]
PARTITION BY date
OPTIONS(
  partition_expiration_days=90,
  description="oxford_policy_tracker table in the COVID 19 Government Response public dataset with an expiry time set to 90 days."
) AS
SELECT
  *
FROM
  `bigquery-public-data.covid19_govt_response.oxford_policy_tracker`
WHERE
  alpha_3_code NOT IN ('GBR', 'USA')

ALTER TABLE [DATASET].[TABLE]
ADD COLUMN population INT64,
ADD COLUMN country_area FLOAT64,
ADD COLUMN mobility STRUCT<
  avg_retail FLOAT64,
  avg_grocery FLOAT64,
  avg_parks FLOAT64,
  avg_transit FLOAT64,
  avg_workplace FLOAT64,
  avg_residential FLOAT64
>

UPDATE
  `[DATASET].[TABLE]` t0
SET
  population = t1.population
FROM
  `bigquery-public-data.covid19_ecdc.covid_19_geographic_distribution_worldwide` t1
WHERE
  CONCAT(t0.alpha_3_code, t0.date) = CONCAT(t1.country_territory_code, t1.date);

UPDATE
  `[DATASET].[TABLE]` t0
SET
  t0.country_area = t1.country_area
FROM
  `bigquery-public-data.census_bureau_international.country_names_area` t1
WHERE
  t0.country_name = t1.country_name

UPDATE
  `[DATASET].[TABLE]` t0
SET
  t0.mobility.avg_retail = t1.avg_retail,
  t0.mobility.avg_grocery = t1.avg_grocery,
  t0.mobility.avg_parks = t1.avg_parks,
  t0.mobility.avg_transit = t1.avg_transit,
  t0.mobility.avg_workplace = t1.avg_workplace,
  t0.mobility.avg_residential = t1.avg_residential
FROM
  ( SELECT country_region, date,
      AVG(retail_and_recreation_percent_change_from_baseline) AS avg_retail,
      AVG(grocery_and_pharmacy_percent_change_from_baseline) AS avg_grocery,
      AVG(parks_percent_change_from_baseline) AS avg_parks,
      AVG(transit_stations_percent_change_from_baseline) AS avg_transit,
      AVG(workplaces_percent_change_from_baseline) AS avg_workplace,
      AVG(residential_percent_change_from_baseline) AS avg_residential
    FROM `bigquery-public-data.covid19_google_mobility.mobility_report`
    GROUP BY country_region, date
  ) AS t1
WHERE
  CONCAT(t0.country_name, t0.date) = CONCAT(t1.country_region, t1.date)

SELECT country_name, population
FROM `[DATASET].[TABLE]`
WHERE population IS NULL

SELECT country_name, country_area
FROM `[DATASET].[TABLE]`
WHERE country_area IS NULL

SELECT DISTINCT country_name
FROM `[DATASET].[TABLE]`
WHERE population IS NULL
UNION ALL
SELECT DISTINCT country_name
FROM `[DATASET].[TABLE]`
WHERE country_area IS NULL
ORDER BY country_name ASC
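
# Optional check from Cloud Shell that the 90-day partition expiry took
# effect (90 days = 7776000000 ms); placeholders as above.
bq show --format=prettyjson [DATASET].[TABLE] | grep -A 3 timePartitioning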
--------------------------------------------------------------------------------
/Getting Started: Create and Manage Cloud Resources: Challenge:
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON : https://www.youtube.com/watch?v=EOAjT5V8ZqY

Step 1: gcloud compute instances create nucleus-jumphost \
  --network nucleus-vpc \
  --zone us-east1-b \
  --machine-type f1-micro \
  --image-family debian-9 \
  --image-project debian-cloud \
  --scopes cloud-platform \
  --no-address

Step 2: gcloud container clusters create nucleus-backend \
  --num-nodes 1 \
  --network nucleus-vpc \
  --region us-east1
gcloud container clusters get-credentials nucleus-backend \
  --region us-east1

kubectl create deployment hello-server \
  --image=gcr.io/google-samples/hello-app:2.0

kubectl expose deployment hello-server \
  --type=LoadBalancer \
  --port 8080

Step 3: kubectl get pods

Step 4: cat << EOF > startup.sh
#!/bin/bash
apt-get update
apt-get install -y nginx
service nginx start
sed -i -- 's/nginx/Google Cloud Platform - '"\$HOSTNAME"'/' /var/www/html/index.nginx-debian.html
EOF

Step 5: gcloud compute instance-templates create web-server-template \
  --metadata-from-file startup-script=startup.sh \
  --network nucleus-vpc \
  --machine-type g1-small \
  --region us-east1

Step 6: gcloud compute instance-groups managed create web-server-group \
  --base-instance-name web-server \
  --size 2 \
  --template web-server-template \
  --region us-east1

Step 7: gcloud compute firewall-rules create web-server-firewall \
  --allow tcp:80 \
  --network nucleus-vpc

Step 8: gcloud compute http-health-checks create http-basic-check

Step 9: gcloud compute instance-groups managed \
  set-named-ports web-server-group \
  --named-ports http:80 \
  --region us-east1

Step 10: gcloud compute backend-services create web-server-backend \
  --protocol HTTP \
  --http-health-checks http-basic-check \
  --global

Step 11: gcloud compute backend-services add-backend web-server-backend \
  --instance-group web-server-group \
  --instance-group-region us-east1 \
  --global

Step 12: gcloud compute url-maps create web-server-map \
  --default-service web-server-backend
gcloud compute target-http-proxies create http-lb-proxy \
  --url-map web-server-map

Step 13: gcloud compute forwarding-rules create http-content-rule \
  --global \
  --target-http-proxy http-lb-proxy \
  --ports 80

Step 14: gcloud compute forwarding-rules list
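
# Optional check: grab the load balancer's IP from the forwarding rule and
# poll until the nginx pages come up (this can take a few minutes).
LB_IP=$(gcloud compute forwarding-rules describe http-content-rule --global --format='get(IPAddress)')
while true; do curl -m1 http://$LB_IP; sleep 2; done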
--------------------------------------------------------------------------------
/Create ML Models with BigQuery ML: Challenge Lab:
--------------------------------------------------------------------------------
Complete each of the sets of steps below to prepare the lab for the activity tracking assessment for each task in the lab.

Task 1: Create a dataset to store your machine learning models.
Any name will pass the test, but for the remaining instructions to work use `austin` as the dataset name.

bq mk austin

Task 2: Create a forecasting BigQuery machine learning model.
Create the first ML model using a JOIN between the two bike share tables. Again, any names will work, but keep `austin.location_model` and `austin.subscriber_model` for the remaining instructions to work.

BigQuery Console Query Editor

CREATE OR REPLACE MODEL austin.location_model
OPTIONS
  (model_type='linear_reg', labels=['duration_minutes']) AS
SELECT
  start_station_name,
  EXTRACT(HOUR FROM start_time) AS start_hour,
  EXTRACT(DAYOFWEEK FROM start_time) AS day_of_week,
  duration_minutes,
  address AS location
FROM
  `bigquery-public-data.austin_bikeshare.bikeshare_trips` AS trips
JOIN
  `bigquery-public-data.austin_bikeshare.bikeshare_stations` AS stations
ON
  trips.start_station_name = stations.name
WHERE
  EXTRACT(YEAR FROM start_time) = 2018
  AND duration_minutes > 0

Task 3: Create the second machine learning model.

BigQuery Console Query Editor
CREATE OR REPLACE MODEL austin.subscriber_model
OPTIONS
  (model_type='linear_reg', labels=['duration_minutes']) AS
SELECT
  start_station_name,
  EXTRACT(HOUR FROM start_time) AS start_hour,
  subscriber_type,
  duration_minutes
FROM `bigquery-public-data.austin_bikeshare.bikeshare_trips` AS trips
WHERE EXTRACT(YEAR FROM start_time) = 2018

Task 4: Evaluate the two machine learning models.

BigQuery Console Query Editor
Query 1

-- Evaluation metrics for location_model
SELECT
  SQRT(mean_squared_error) AS rmse,
  mean_absolute_error
FROM
  ML.EVALUATE(MODEL austin.location_model, (
  SELECT
    start_station_name,
    EXTRACT(HOUR FROM start_time) AS start_hour,
    EXTRACT(DAYOFWEEK FROM start_time) AS day_of_week,
    duration_minutes,
    address AS location
  FROM
    `bigquery-public-data.austin_bikeshare.bikeshare_trips` AS trips
  JOIN
    `bigquery-public-data.austin_bikeshare.bikeshare_stations` AS stations
  ON
    trips.start_station_name = stations.name
  WHERE EXTRACT(YEAR FROM start_time) = 2019)
)

Query 2
-- Evaluation metrics for subscriber_model
SELECT
  SQRT(mean_squared_error) AS rmse,
  mean_absolute_error
FROM
  ML.EVALUATE(MODEL austin.subscriber_model, (
  SELECT
    start_station_name,
    EXTRACT(HOUR FROM start_time) AS start_hour,
    subscriber_type,
    duration_minutes
  FROM
    `bigquery-public-data.austin_bikeshare.bikeshare_trips` AS trips
  WHERE
    EXTRACT(YEAR FROM start_time) = 2019)
)

Task 5: Use the subscriber type machine learning model to predict average trip durations.
Use the second model (austin.subscriber_model in this case) to predict the average duration of all trips from the busiest rental station in 2019 (based on the number of rentals per station in 2019) where subscriber_type = 'Single Trip'.

The following query lists the busiest stations in descending order. The busiest station for 2019 was "21st & Speedway @PCL".

BigQuery Console Query Editor
SELECT
  start_station_name,
  COUNT(*) AS trips
FROM
  `bigquery-public-data.austin_bikeshare.bikeshare_trips`
WHERE
  EXTRACT(YEAR FROM start_time) = 2019
GROUP BY
  start_station_name
ORDER BY
  trips DESC

Then predict trip length.

BigQuery Console Query Editor
SELECT AVG(predicted_duration_minutes) AS average_predicted_trip_length
FROM ML.PREDICT(MODEL austin.subscriber_model, (
  SELECT
    start_station_name,
    EXTRACT(HOUR FROM start_time) AS start_hour,
    subscriber_type,
    duration_minutes
  FROM
    `bigquery-public-data.austin_bikeshare.bikeshare_trips`
  WHERE
    EXTRACT(YEAR FROM start_time) = 2019
    AND subscriber_type = 'Single Trip'
    AND start_station_name = '21st & Speedway @PCL'))
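
# Optional sanity check from Cloud Shell that both models trained (names as
# above).
bq query --use_legacy_sql=false \
  'SELECT iteration, loss FROM ML.TRAINING_INFO(MODEL `austin.location_model`)'
bq query --use_legacy_sql=false \
  'SELECT iteration, loss FROM ML.TRAINING_INFO(MODEL `austin.subscriber_model`)'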
--------------------------------------------------------------------------------
/analyze-images.py:
--------------------------------------------------------------------------------
# Dataset: image_classification_dataset
# Table name: image_text_detail
import os
import sys

# Import Google Cloud Library modules
from google.cloud import storage, bigquery, language, vision
from google.cloud import translate_v2 as translate

if ('GOOGLE_APPLICATION_CREDENTIALS' in os.environ):
    if (not os.path.exists(os.environ['GOOGLE_APPLICATION_CREDENTIALS'])):
        print("The GOOGLE_APPLICATION_CREDENTIALS file does not exist.\n")
        exit()
else:
    print("The GOOGLE_APPLICATION_CREDENTIALS environment variable is not defined.\n")
    exit()

if len(sys.argv) < 3:
    print('You must provide parameters for the Google Cloud project ID and Storage bucket')
    print('python3 ' + sys.argv[0] + ' [PROJECT_NAME] [BUCKET_NAME]')
    exit()

project_name = sys.argv[1]
bucket_name = sys.argv[2]

# Set up our GCS, BigQuery, and Natural Language clients
storage_client = storage.Client()
bq_client = bigquery.Client(project=project_name)
nl_client = language.LanguageServiceClient()

# Set up client objects for the vision and translate_v2 API Libraries
vision_client = vision.ImageAnnotatorClient()
translate_client = translate.Client()

# Setup the BigQuery dataset and table objects
dataset_ref = bq_client.dataset('image_classification_dataset')
dataset = bigquery.Dataset(dataset_ref)
table_ref = dataset.table('image_text_detail')
table = bq_client.get_table(table_ref)

# Create an array to store results data to be inserted into the BigQuery table
rows_for_bq = []

# Get a list of the files in the Cloud Storage Bucket
files = storage_client.bucket(bucket_name).list_blobs()
bucket = storage_client.bucket(bucket_name)

print('Processing image files from GCS. This will take a few minutes..')

# Process files from Cloud Storage and save the result to send to BigQuery
for file in files:
    if file.name.endswith('jpg') or file.name.endswith('png'):
        file_content = file.download_as_string()

        # Create a Vision API image object called image_object
        # Ref: https://googleapis.dev/python/vision/latest/gapic/v1/types.html#google.cloud.vision_v1.types.Image
        image_object = vision.Image(content=file_content)

        # Detect text in the image and save the response data into an object called response
        # Ref: https://googleapis.dev/python/vision/latest/gapic/v1/api.html#google.cloud.vision_v1.ImageAnnotatorClient.document_text_detection
        response = vision_client.text_detection(image=image_object)

        # Save the text content found by the vision API into a variable called text_data
        text_data = response.text_annotations[0].description

        # Save the text detection response data in .txt to cloud storage
        file_name = file.name.split('.')[0] + '.txt'
        blob = bucket.blob(file_name)
        # Upload the contents of the text_data string variable to the Cloud Storage file
        blob.upload_from_string(text_data, content_type='text/plain')

        # Extract the description and locale data from the response
        # into variables called desc and locale
        desc = response.text_annotations[0].description
        locale = response.text_annotations[0].locale

        # If the locale is English (en) save the description as the translated_text
        if locale == 'en':
            translated_text = desc
        else:
            # For non-EN locales pass the description data to the translation API
            # Ref: https://googleapis.dev/python/translation/latest/client.html#google.cloud.translate_v2.client.Client.translate
            translation = translate_client.translate(desc, target_language='en', format_='text')
            translated_text = translation['translatedText']
        print(translated_text)

        # If there is response data save the original text read from the image,
        # the locale, translated text, and filename
        if len(response.text_annotations) > 0:
            rows_for_bq.append((desc, locale, translated_text, file.name))

print('Writing Vision API image data to BigQuery...')
# Write original text, locale and translated text to BQ
errors = bq_client.insert_rows(table, rows_for_bq)
assert errors == []
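
# The script exits early unless GOOGLE_APPLICATION_CREDENTIALS points at a
# valid key file (see the check at the top), so a typical run looks like:
export GOOGLE_APPLICATION_CREDENTIALS=~/key.json
python3 analyze-images.py $DEVSHELL_PROJECT_ID $DEVSHELL_PROJECT_ID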
--------------------------------------------------------------------------------
/Insights from Data with BigQuery: Challenge Lab(Updated task 9):
--------------------------------------------------------------------------------
#WATCH FULL VIDEO ON : https://www.youtube.com/watch?v=NFBAMZrQmOY

1

SELECT SUM(cumulative_confirmed) AS total_cases_worldwide
FROM `bigquery-public-data.covid19_open_data.covid19_open_data`
WHERE date = '2020-04-15'

2

WITH deaths_by_states AS (
  SELECT subregion1_name AS state, SUM(cumulative_deceased) AS death_count
  FROM `bigquery-public-data.covid19_open_data.covid19_open_data`
  WHERE country_name = "United States of America" AND date = '2020-04-10' AND subregion1_name IS NOT NULL
  GROUP BY subregion1_name
)
SELECT COUNT(*) AS count_of_states
FROM deaths_by_states
WHERE death_count > 100

3

SELECT subregion1_name AS state, SUM(cumulative_confirmed) AS total_confirmed_cases
FROM `bigquery-public-data.covid19_open_data.covid19_open_data`
WHERE country_name = "United States of America" AND date = '2020-04-10' AND subregion1_name IS NOT NULL
GROUP BY subregion1_name
HAVING total_confirmed_cases > 1000
ORDER BY total_confirmed_cases DESC

4

SELECT SUM(cumulative_confirmed) AS total_confirmed_cases, SUM(cumulative_deceased) AS total_deaths, (SUM(cumulative_deceased)/SUM(cumulative_confirmed))*100 AS case_fatality_ratio
FROM `bigquery-public-data.covid19_open_data.covid19_open_data`
WHERE country_name = "Italy" AND date BETWEEN "2020-04-01" AND "2020-04-30"

5

SELECT date
FROM `bigquery-public-data.covid19_open_data.covid19_open_data`
WHERE country_name = "Italy" AND cumulative_deceased > 10000
ORDER BY date ASC
LIMIT 1

6

WITH india_cases_by_date AS (
  SELECT
    date,
    SUM(cumulative_confirmed) AS cases
  FROM
    `bigquery-public-data.covid19_open_data.covid19_open_data`
  WHERE
    country_name = "India"
    AND date BETWEEN '2020-02-21' AND '2020-03-15'
  GROUP BY
    date
  ORDER BY
    date ASC
)
, india_previous_day_comparison AS (
  SELECT
    date,
    cases,
    LAG(cases) OVER(ORDER BY date) AS previous_day,
    cases - LAG(cases) OVER(ORDER BY date) AS net_new_cases
  FROM india_cases_by_date
)
SELECT COUNT(*)
FROM india_previous_day_comparison
WHERE net_new_cases = 0

7

WITH us_cases_by_date AS (
  SELECT
    date,
    SUM(cumulative_confirmed) AS cases
  FROM
    `bigquery-public-data.covid19_open_data.covid19_open_data`
  WHERE
    country_name = "United States of America"
    AND date BETWEEN '2020-03-22' AND '2020-04-20'
  GROUP BY
    date
  ORDER BY
    date ASC
)
, us_previous_day_comparison AS (
  SELECT
    date,
    cases,
    LAG(cases) OVER(ORDER BY date) AS previous_day,
    cases - LAG(cases) OVER(ORDER BY date) AS net_new_cases,
    (cases - LAG(cases) OVER(ORDER BY date))*100/LAG(cases) OVER(ORDER BY date) AS percentage_increase
  FROM us_cases_by_date
)
SELECT date AS Date, cases AS Confirmed_Cases_On_Day, previous_day AS Confirmed_Cases_Previous_Day, percentage_increase AS Percentage_Increase_In_Cases
FROM us_previous_day_comparison
WHERE percentage_increase > 10

8

WITH cases_by_country AS (
  SELECT
    country_name AS country,
    SUM(cumulative_confirmed) AS cases,
    SUM(cumulative_recovered) AS recovered_cases
  FROM
    `bigquery-public-data.covid19_open_data.covid19_open_data`
  WHERE
    date = '2020-05-10'
  GROUP BY
    country_name
)
, recovered_rate AS (
  SELECT
    country, cases, recovered_cases,
    (recovered_cases * 100)/cases AS recovery_rate
  FROM cases_by_country
)
SELECT country, cases AS confirmed_cases, recovered_cases, recovery_rate
FROM recovered_rate
WHERE cases > 50000
ORDER BY recovery_rate DESC
LIMIT 10

9

WITH
france_cases AS (
  SELECT
    date,
    SUM(cumulative_confirmed) AS total_cases
  FROM
    `bigquery-public-data.covid19_open_data.covid19_open_data`
  WHERE
    country_name = "France"
    AND date IN ('2020-01-24', '2020-05-10')
  GROUP BY
    date
  ORDER BY
    date)
, summary AS (
  SELECT
    total_cases AS first_day_cases,
    LEAD(total_cases) OVER(ORDER BY date) AS last_day_cases,
    DATE_DIFF(LEAD(date) OVER(ORDER BY date), date, day) AS days_diff
  FROM
    france_cases
  LIMIT 1
)
SELECT first_day_cases, last_day_cases, days_diff, POW((last_day_cases/first_day_cases),(1/days_diff))-1 AS cdgr
FROM summary

10

SELECT
  date, SUM(cumulative_confirmed) AS country_cases,
  SUM(cumulative_deceased) AS country_deaths
FROM
  `bigquery-public-data.covid19_open_data.covid19_open_data`
WHERE
  date BETWEEN '2020-03-15' AND '2020-04-30'
  AND country_name = "United States of America"
GROUP BY date

After running the '10' query:
Click "Explore Data" > "Explore with Data Studio"
Authorize your BigQuery in Data Studio
Save your Data Studio Explorer
Get Started > Check "I acknowledge..." > Accept > Yes to All > Done
Check Your Progress
--------------------------------------------------------------------------------