├── assets
│   ├── cv_mouad.pdf
│   ├── academic
│   │   ├── bac.jpg
│   │   ├── esi.jpg
│   │   ├── uni.jpg
│   │   └── paris.jpg
│   ├── home
│   │   ├── my_zone.png
│   │   └── profile-pic.png
│   ├── work
│   │   ├── alexsys
│   │   │   ├── auth.png
│   │   │   ├── home.png
│   │   │   ├── alexsys.png
│   │   │   └── add_person.png
│   │   ├── astek
│   │   │   ├── astek.png
│   │   │   ├── data.png
│   │   │   ├── sensor.png
│   │   │   ├── activity.png
│   │   │   ├── database.png
│   │   │   └── navigation.png
│   │   ├── aqsone
│   │   │   ├── aqsone.png
│   │   │   ├── example_1.png
│   │   │   └── example_2.png
│   │   └── digitaladvisor
│   │       └── digitaladvisor.png
│   ├── certifications
│   │   ├── nlp.png
│   │   ├── aws_ml.jpg
│   │   ├── nn_dl.png
│   │   ├── aws_pract.png
│   │   ├── dl_tuning.png
│   │   └── ml_stanford.png
│   └── personal_projects
│       ├── e2e.png
│       └── three_models.png
├── __pycache__
│   └── utils.cpython-311.pyc
├── README.md
├── requirements.txt
├── .streamlit
│   └── config.toml
├── styles
│   └── main.css
├── .devcontainer
│   └── devcontainer.json
├── 1_🏡_Home.py
└── pages
    ├── 2_🏫_Academic_Background.py
    ├── 5_🥇_Certifications.py
    ├── 4_🧪_Personal_Projects.py
    └── 3_⚒️_Professional_Experiences.py

/assets/cv_mouad.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/cv_mouad.pdf
--------------------------------------------------------------------------------
/assets/academic/bac.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/academic/bac.jpg
--------------------------------------------------------------------------------
/assets/academic/esi.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/academic/esi.jpg
--------------------------------------------------------------------------------
/assets/academic/uni.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/academic/uni.jpg
--------------------------------------------------------------------------------
/assets/home/my_zone.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/home/my_zone.png
--------------------------------------------------------------------------------
/assets/academic/paris.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/academic/paris.jpg
--------------------------------------------------------------------------------
/assets/home/profile-pic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/home/profile-pic.png
--------------------------------------------------------------------------------
/assets/work/alexsys/auth.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/alexsys/auth.png
--------------------------------------------------------------------------------
/assets/work/alexsys/home.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/alexsys/home.png
--------------------------------------------------------------------------------
/assets/work/astek/astek.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/astek/astek.png
--------------------------------------------------------------------------------
/assets/work/astek/data.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/astek/data.png
--------------------------------------------------------------------------------
/assets/work/astek/sensor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/astek/sensor.png
--------------------------------------------------------------------------------
/assets/certifications/nlp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/certifications/nlp.png
--------------------------------------------------------------------------------
/assets/work/aqsone/aqsone.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/aqsone/aqsone.png
--------------------------------------------------------------------------------
/assets/work/astek/activity.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/astek/activity.png
--------------------------------------------------------------------------------
/assets/work/astek/database.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/astek/database.png
--------------------------------------------------------------------------------
/__pycache__/utils.cpython-311.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/__pycache__/utils.cpython-311.pyc
--------------------------------------------------------------------------------
/assets/certifications/aws_ml.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/certifications/aws_ml.jpg
--------------------------------------------------------------------------------
/assets/certifications/nn_dl.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/certifications/nn_dl.png
--------------------------------------------------------------------------------
/assets/personal_projects/e2e.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/personal_projects/e2e.png
--------------------------------------------------------------------------------
/assets/work/alexsys/alexsys.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/alexsys/alexsys.png
--------------------------------------------------------------------------------
/assets/work/aqsone/example_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/aqsone/example_1.png
--------------------------------------------------------------------------------
/assets/work/aqsone/example_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/aqsone/example_2.png
--------------------------------------------------------------------------------
/assets/work/astek/navigation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/astek/navigation.png
--------------------------------------------------------------------------------
/assets/certifications/aws_pract.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/certifications/aws_pract.png
--------------------------------------------------------------------------------
/assets/certifications/dl_tuning.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/certifications/dl_tuning.png
--------------------------------------------------------------------------------
/assets/work/alexsys/add_person.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/alexsys/add_person.png
--------------------------------------------------------------------------------
/assets/certifications/ml_stanford.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/certifications/ml_stanford.png
--------------------------------------------------------------------------------
/assets/personal_projects/three_models.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/personal_projects/three_models.png
--------------------------------------------------------------------------------
/assets/work/digitaladvisor/digitaladvisor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/alexfazio/streamlit_resume/main/assets/work/digitaladvisor/digitaladvisor.png
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # My Digital CV
2 | 
3 | This project's goal is to showcase:
4 | 
5 | - My experience in the tech industry.
6 | - My skills and background.
7 | - My certifications and trainings.
8 | 
9 | The classic resume format is starting to look a bit too generic. This is my attempt at making the experience more fluid and enjoyable for you, whoever you are!
10 | 
11 | Have fun.
12 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | altair==4.1.0
2 | Pillow==9.4.0
3 | streamlit==1.16.0
4 | streamlit-camera-input-live==0.2.0
5 | streamlit-card==0.0.4
6 | streamlit-embedcode==0.1.2
7 | streamlit-extras==0.2.7
8 | streamlit-faker==0.0.2
9 | streamlit-image-coordinates==0.1.3
10 | streamlit-keyup==0.2.0
11 | streamlit-toggle-switch==1.0.2
12 | streamlit-vertical-slider==1.0.2
--------------------------------------------------------------------------------
/.streamlit/config.toml:
--------------------------------------------------------------------------------
1 | [theme]
2 | # Primary accent color for interactive elements.
3 | primaryColor = "#d33682"
4 | 
5 | # Background color for the main content area.
6 | backgroundColor = "#22272E"
7 | 
8 | # Background color used for the sidebar and most interactive widgets.
9 | secondaryBackgroundColor = "#537188"
10 | 
11 | # Color used for almost all text.
12 | textColor = "#fff"
--------------------------------------------------------------------------------
/styles/main.css:
--------------------------------------------------------------------------------
1 | @import url('https://fonts.googleapis.com/css2?family=Readex+Pro:wght@300;400;500;600;700&display=swap');
2 | 
3 | 
4 | * {font-family: 'Readex Pro';}
5 | 
6 | 
7 | a {
8 |     text-decoration: none;
9 |     color: white !important;
10 |     font-weight: 500;
11 | }
12 | 
13 | a:hover {
14 |     color: #d33682 !important;
15 |     text-decoration: none;
16 | }
17 | 
18 | ul {list-style-type: none;}
19 | 
20 | hr {
21 |     margin-top: 0px;
22 |     margin-bottom: 5%;
23 | }
24 | 
25 | #MainMenu {visibility: visible;}
26 | footer {visibility: hidden;}
27 | header {visibility: visible;}
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 |     "name": "Python 3",
3 |     // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile
4 |     "image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
5 |     "customizations": {
6 |         "codespaces": {
7 |             "openFiles": [
8 |                 "README.md",
9 |                 "1_🏡_Home.py"
10 |             ]
11 |         },
12 |         "vscode": {
13 |             "settings": {},
14 |             "extensions": [
15 |                 "ms-python.python",
16 |                 "ms-python.vscode-pylance"
17 |             ]
18 |         }
19 |     },
20 |     "updateContentCommand": "[ -f packages.txt ] && sudo apt update && sudo apt upgrade -y && sudo xargs apt install -y <packages.txt; [ -f requirements.txt ] && pip3 install --user -r requirements.txt; pip3 install --user streamlit",
21 |     "postAttachCommand": {
22 |         "server": "streamlit run 1_🏡_Home.py --server.enableCORS false --server.enableXsrfProtection false"
23 |     },
24 |     "portsAttributes": {
25 |         "8501": {
26 |             "label": "Application",
27 |             "onAutoForward": "openPreview"
28 |         }
29 |     },
30 |     "forwardPorts": [
31 |         8501
32 |     ]
33 | }
--------------------------------------------------------------------------------
/1_🏡_Home.py:
--------------------------------------------------------------------------------
59 | with open(css_file) as f:
60 |     st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
61 | 
62 | profile_pic = Image.open(profile_pic)
63 | 
64 | my_zone_pic = Image.open(my_zone_pic)
65 | # ------ HERO SECTION -----------
66 | 
67 | cols = st.columns(2, gap='small')
68 | 
69 | with cols[0]:
70 |     st.image(profile_pic, width=230)
71 | 
72 | 
73 | with cols[1]:
74 |     st.title(NAME)
75 |     st.write(DESCRIPTION)
76 |     st.download_button(
77 |         label="📄 Download Resume",
78 |         data=PDFbyte,
79 |         file_name=resume_file.name,
80 |         mime="application/octet-stream"
81 |     )
82 |     st.write("📫", EMAIL)
83 | 
84 | 
85 | # -------- SOCIALS ---------
86 | 
87 | V_SPACE(1)
88 | 
89 | cols = st.columns(len(SOCIAL_MEDIA))
90 | for index, (platform, link) in enumerate(SOCIAL_MEDIA.items()):
91 |     cols[index].write(f"[{platform}]({link})")
92 | 
93 | 
94 | # ------- EXPERIENCE AND QUALIFS --------
95 | 
96 | V_SPACE(1)
97 | st.subheader('About me 🛝')
98 | st.write(
99 |     """
100 | - ✔️ **3 years of experience** in data science consulting firms, for clients like TotalEnergies, ONCF, Nexans and Allegro Musique (details in Professional Experiences)
101 | - ✔️ Built multiple ML-based web applications (Python, JavaScript, D3.js, Streamlit) with deployment on AWS **(SageMaker, API Gateway, Lambda).**
102 | - ✔️ Expertise in statistical principles and classical ML models
103 | - ✔️ Product- and value-oriented mindset (my dream is to build valuable ML tools, my nightmare is models dying in notebooks)
104 | - ✔️ Work feels best when it's **challenging enough to push me but not so easy that it bores me**
105 |     """
106 |     , unsafe_allow_html=True)
107 | st.image(my_zone_pic)
108 | st.write(""" ⚠️ Warning: if you hand me a boring task I will try to automate it.""", unsafe_allow_html=True)
109 | # --- SKILLS ---
110 | st.write('\n')
111 | st.subheader("Hard Skills 🔬")
112 | st.write(
113 |     """
114 | - 👩‍💻 Programming: Python, SQL, PySpark
115 | - 🧪 Data science: Machine Learning, ensemble methods (Bagging, Boosting) / kernel methods (SVM, SPCA), Deep Learning, Natural Language Processing, Optimization
116 | - 📊 Data Visualization: Power BI, Qlik Sense, D3.js
117 | - 📚 Transfer Learning: LLMs, CNNs, Transformers ...
118 | - 🗄️ Databases: Postgres, MongoDB, MySQL (on premise and cloud)
119 | - ☁️ Cloud: AWS (Certified Cloud Practitioner (CLF)), Palantir Foundry
120 | - 🚀 Deployment: Docker, Heroku, AWS
121 | """
122 |     )
123 | go_to_full_page("See my certifications and trainings", "Certifications")
124 | 
125 | # --------- work history ---------
126 | V_SPACE(1)
127 | st.subheader("Recent Job Experience 🧑‍💻")
128 | st.write('---')
129 | 
130 | st.write('\n')
131 | st.write("🚧", "**Data Scientist | Aqsone**")
132 | st.write("09/2022 - Present")
133 | st.write(
134 |     """
135 | - ► Collaborated on the creation of a digital costing solution that predicts the cost of clothing items from their image and description, based on Convolutional Neural Networks and Transformers.
136 | - ► Development of a 360° Procurement solution using Python, AWS MySQL and Google Data Studio, with interactive dashboards including forecasts, spend analysis, supplier audits, CO2 emissions and more.
137 | - ► Participation in the creation of a Succession Planning solution that uses Machine Learning for optimal successor choice, using D3.js for visualizations.
138 | - ► Commercial work: participation in the development of multiple proofs of concept to demonstrate to prospects and clients for business development purposes.
139 | - ► Internal work: alongside 2 other data scientists and an Agile coach, we handle the management of different courses and certifications for the rest of the company.
140 | - ► Internal work: organization of monthly presentations about the state of the art in the fields of data, AI and ML, as well as introducing new tools to our collaborators.
141 |     """, unsafe_allow_html=True
142 | )
143 | 
144 | go_to_full_page("Check out all my experiences", "Professional Experiences")
145 | 
146 | 
147 | # --- Projects & Accomplishments ---
148 | st.write('\n')
149 | st.subheader("Personal Projects 🧙‍♂️")
150 | st.write("---")
151 | for project, link in PROJECTS.items():
152 |     st.write(f"[{project}]({link})")
153 | 
154 | 
155 | 
156 | go_to_full_page("More Personal Projects", "Personal Projects")
157 | 
--------------------------------------------------------------------------------
/pages/2_🏫_Academic_Background.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | 
3 | import streamlit as st
4 | from streamlit_extras.switch_page_button import switch_page
5 | from PIL import Image
6 | 
7 | # ------------ CONSTANTS ----------
8 | PAGE_TITLE = "Academia | Et-tali Mouad"
9 | PAGE_ICON = "🏛"
10 | NAME = "Et-tali Mouad"
11 | DESCRIPTION = """
12 | My academic journey has always been influenced by my love for maths and coding, starting from my Mathematical Sciences Baccalaureate down to my latest experience.
13 | """
14 | 
15 | # ---------- PARIS_CITE ------------------
16 | PARIS_CITE_ICON = "📜"
17 | PARIS_CITE_TITLE = "**Master 2 Machine Learning for Data Science | Université Paris-Descartes**"
18 | PARIS_CITE_PERIOD = "09/2021 - 09/2022"
19 | PARIS_CITE_DESCRIPTION = """
20 | - 🔸 **Relevant Courses:** Unsupervised Learning, Deep Learning, Gaussian Mixture Models, Recommender Systems.
21 | - 🔸 **Motivation for Masters:** Deepening my theoretical foundations in machine learning and honing my data science skills.
22 | - 🔸 **Difficulties:** Adjusting to the level of knowledge and skill required in internship and job interviews.
23 | - 🔸 **What I learned:** The best way to learn is to enjoy the process of going from 0 to 1 and not be frustrated at 0.5.
24 | """
25 | # ---------------------------------
26 | 
27 | # ---------- ESI ------------------
28 | ESI_ICON = "🧑🏻‍💻"
29 | ESI_TITLE = "**Engineering Diploma in Data Science and Knowledge Management | Université des Sciences de L'information**"
30 | ESI_PERIOD = "09/2017 - 09/2020"
31 | ESI_DESCRIPTION = """
32 | - 🔸 **Relevant Courses:** Information Theory, Statistics, Algorithmics, Data Science, Machine Learning, Python, Java, SQL.
33 | - 🔸 **Difficulties:** Engineering diplomas are very time-demanding, even more than prep school; **time management** is key when it comes to attending 8 hours of courses and then preparing 3-4 projects simultaneously.
34 | - 🔸 **What I learned:** I learned that I chose a path in life where I need to be **very organized**, and that realisation is important in and of itself.
35 | """
36 | # --------------------------------------
37 | 
38 | # ---------- PREP ------------------
39 | PREP_ICON = "🚀"
40 | PREP_TITLE = "**Preparatory classes for grandes écoles (Maths/Physics) | Prep School AL-KHANSAA**"
41 | PREP_PERIOD = "09/2015 - 09/2017"
42 | PREP_DESCRIPTION = """
43 | - 🔸 **Relevant Courses:** Mathematics (Linear Algebra, Probabilities, Calculus), Physics, Algorithmics, Philosophy.
44 | - 🔸 **Motivation for Prep School:** At this point, I didn't know exactly what I wanted to do in life, but I knew that prep school was the hardest thing to do after high school, so I chose the hard way.
45 | - 🔸 **Difficulties:** Adjusting to the level of deep understanding required of mathematical concepts, especially calculus in maths and quantum physics in physics.
46 | - 🔸 **What I learned:** I learned that every person has their own pace and talents; some people understand things faster than others. The important thing is to be compassionate and treat people with kindness.
47 | """
48 | # --------------------------------------
49 | 
50 | # ---------- BAC ------------------
51 | BAC_ICON = "🎒"
52 | BAC_TITLE = "**Scientific Baccalaureate in Mathematical Sciences | Groupe Scholaire Ouhoud**"
53 | BAC_PERIOD = "06/2015"
54 | BAC_DESCRIPTION = """
55 | - 🔸 **Relevant Courses:** Mathematics, Physics, Philosophy.
56 | - 🔸 **Difficulties:** The pressure from society to do well on that particular exam, and the expectations of parents and teachers who viewed us as "excellent students who are definitely headed to prep school".
57 | - 🔸 **What I learned:** All things must come to an end.
58 | """
59 | # --------------------------------------
60 | 
61 | st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON, layout="centered")
62 | 
63 | 
64 | # ------------ PATH SETTINGS ----------
65 | current_dir = Path(__file__).parent.parent
66 | 
67 | css_file = current_dir / "styles" / "main.css"
68 | 
69 | uni_pic = Image.open(current_dir / "assets" / "academic" / "uni.jpg")
70 | 
71 | esi_pic = Image.open(current_dir / "assets" / "academic" / "esi.jpg")
72 | 
73 | paris_pic = Image.open(current_dir / "assets" / "academic" / "paris.jpg")
74 | 
75 | bac_pic = Image.open(current_dir / "assets" / "academic" / "bac.jpg")
76 | 
77 | 
78 | st.title("Academic Background")
79 | 
80 | # --------------- HELPER FUNCTIONS -----------------------
81 | def V_SPACE(lines):
82 |     for _ in range(lines):
83 |         st.write(' ')
84 | 
85 | def go_to_full_page(label, page):
86 |     personal_project = st.button(label)
87 |     if personal_project:
88 |         switch_page(page)
89 | 
90 | def create_background_section(ICON, BACKGROUND_TITLE, BACKGROUND_PERIOD, BACKGROUND_DESCRIPTION):
91 |     st.write('\n')
92 |     st.write(ICON, BACKGROUND_TITLE)
93 |     st.write(BACKGROUND_PERIOD)
94 |     st.write(BACKGROUND_DESCRIPTION, unsafe_allow_html=True)
95 | 
96 | # ----------- CSS, PDF & Profile Pic SETTINGS --------------
97 | 
98 | with open(css_file) as f:
99 |     st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
100 | 
101 | 
102 | # ------ HERO SECTION -----------
103 | cols = st.columns(2, gap='small')
104 | 
105 | with cols[0]:
106 |     st.image(uni_pic)
107 | 
108 | 
109 | with cols[1]:
110 |     st.title(NAME)
111 |     st.write(DESCRIPTION)
112 | 
113 | 
114 | # --------- BACKGROUND ---------
115 | st.subheader("My Journey 🚩")
116 | st.write('---')
117 | 
118 | create_background_section(PARIS_CITE_ICON, PARIS_CITE_TITLE, PARIS_CITE_PERIOD, PARIS_CITE_DESCRIPTION)
119 | V_SPACE(1)
120 | st.image(paris_pic, width=900, caption="Getting ready for Internships and Job Hunting")
121 | st.write('----')
122 | 
123 | create_background_section(ESI_ICON, ESI_TITLE, ESI_PERIOD, ESI_DESCRIPTION)
124 | st.image(esi_pic, caption="Building connections in Engineering University (I had already finished my turn)")
125 | st.write('----')
126 | 
127 | create_background_section(PREP_ICON, PREP_TITLE, PREP_PERIOD, PREP_DESCRIPTION)
128 | st.write("""***PS: no picture for the prep school period on purpose (if you know you know)*** 🙃""", unsafe_allow_html=True)
129 | st.write('----')
130 | 
131 | create_background_section(BAC_ICON, BAC_TITLE, BAC_PERIOD, BAC_DESCRIPTION)
132 | cols = st.columns(3, gap="small")
133 | with cols[1]:
134 |     st.image(bac_pic, caption="Blurry graduation because 2015 Android", width=280)
135 | st.write('----')
--------------------------------------------------------------------------------
/pages/5_🥇_Certifications.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | 
3 | import streamlit as st
4 | from streamlit_extras.switch_page_button import switch_page
5 | from PIL import Image
6 | 
7 | 
8 | # ------------ PATH SETTINGS ----------
9 | current_dir = Path(__file__).parent.parent
10 | css_file = current_dir / "styles" / "main.css"
11 | 
12 | AWS_ML_CERTIFICATION_PIC_PATH = current_dir / "assets" / "certifications" / "aws_ml.jpg"
13 | ML_CERTIFICATION_PIC_PATH = current_dir / "assets" / "certifications" / "ml_stanford.png"
14 | NN_DL_CERTIFICATION_PIC_PATH = current_dir / "assets" / "certifications" / "nn_dl.png"
15 | DL_TUNE_CERTIFICATION_PIC_PATH = current_dir / "assets" / "certifications" / "dl_tuning.png"
16 | AWS_CERTIFICATION_PIC_PATH = current_dir / "assets" / "certifications" / "aws_pract.png"
17 | NLP_CERTIFICATION_PIC_PATH = current_dir / "assets" / "certifications" / "nlp.png"
18 | 
19 | # ------------ CONSTANTS ----------
20 | PAGE_TITLE = "Certifications | Et-tali Mouad"
21 | PAGE_ICON = "🏛"
22 | 
23 | #-------- Certifications CONTENT----------
24 | AWS_ML_CERTIFICATION_TITLE = "AWS Certified Machine Learning Specialty 2023 - Hands On!"
25 | AWS_ML_CERTIFICATION_PIC = Image.open(AWS_ML_CERTIFICATION_PIC_PATH)
26 | AWS_ML_CERTIFICATION_LINK = "https://www.udemy.com/certificate/UC-c0613ef8-6d5c-49aa-8429-c384dfec8d36/"
27 | AWS_ML_CERTIFICATION_DESCRIPTION = """
28 | - ✔ **AWS Certified Machine Learning Specialty 2023 - Hands On!** is an in-depth, hands-on training course for the AWS Machine Learning Specialty certification.
29 | - ✔ This certification covers a wide set of high-level AWS ML services, their use cases, and how to implement them in large-scale solutions within a broader AWS architecture.
30 | - ✔ Some of the services include SageMaker, Canvas, Comprehend, Forecast, Kinesis, Glue and more.
31 | 
32 | """
33 | 
34 | ML_CERTIFICATION_TITLE = "Machine Learning: Stanford University"
35 | ML_CERTIFICATION_PIC = Image.open(ML_CERTIFICATION_PIC_PATH)
36 | ML_CERTIFICATION_LINK = "https://www.coursera.org/account/accomplishments/verify/K52NAQ2FB8Z7"
37 | ML_CERTIFICATION_DESCRIPTION = """
38 | - ✔ **The Machine Learning Specialization** is a foundational online program created in collaboration between **DeepLearning.AI and Stanford Online.**
39 | - ✔ This certification teaches the fundamentals of machine learning and how to use these techniques to build real-world AI applications.
40 | 
41 | """
42 | 
43 | NN_DL_CERTIFICATION_TITLE = "Neural Networks and Deep Learning"
44 | NN_DL_CERTIFICATION_PIC = Image.open(NN_DL_CERTIFICATION_PIC_PATH)
45 | NN_DL_CERTIFICATION_LINK = "https://www.coursera.org/account/accomplishments/verify/M5SRBK44NKX3"
46 | NN_DL_CERTIFICATION_DESCRIPTION = """
47 | - ✔ The Deep Learning Specialization is a foundational program created by **DeepLearning.AI** that helped me understand the capabilities, challenges, and consequences of deep learning.
48 | - ✔ It prepared me to participate in the development of leading-edge AI technology.
49 | - ✔ It provided a pathway for me to gain the knowledge and skills to apply deep learning to my work and level up my technical career.
50 | """
51 | 
52 | DL_TUNE_CERTIFICATION_TITLE = "Improving Deep Neural Networks: Hyperparameter Tuning, Regularization and Optimization"
53 | DL_TUNE_CERTIFICATION_PIC = Image.open(DL_TUNE_CERTIFICATION_PIC_PATH)
54 | DL_TUNE_CERTIFICATION_LINK = "https://www.coursera.org/account/accomplishments/verify/3XZ4UJ2CLZD5"
55 | DL_TUNE_CERTIFICATION_DESCRIPTION = """
56 | - ✔ **Improving Deep Neural Networks** is the second course of the Deep Learning Specialization created by **DeepLearning.AI.**
57 | - ✔ This certification covers opening the deep learning black box: hyperparameter tuning, regularization techniques such as L2 and dropout, batch normalization, and optimization algorithms such as momentum, RMSprop and Adam.
58 | """
59 | 
60 | AWS_CERTIFICATION_TITLE = "AWS Certified Cloud Practitioner"
61 | AWS_CERTIFICATION_PIC = Image.open(AWS_CERTIFICATION_PIC_PATH)
62 | AWS_CERTIFICATION_LINK = "https://www.credly.com/badges/354d559c-4d1e-463c-8852-5dad79e75d13"
63 | AWS_CERTIFICATION_DESCRIPTION = """
64 | - ✔ **The AWS Certified Cloud Practitioner** offers a foundational understanding of **AWS Cloud concepts, services, and terminology.**
65 | - ✔ This was a good starting point for me as I was interested in deploying **ML systems on the cloud.**
66 | - ✔ A potential next step is to get the **Machine Learning Specialty certification** if I need it for a job opportunity.
67 | """
68 | 
69 | NLP_CERTIFICATION_TITLE = "NLP - Natural Language Processing with Python"
70 | NLP_CERTIFICATION_PIC = Image.open(NLP_CERTIFICATION_PIC_PATH)
71 | NLP_CERTIFICATION_LINK = "https://www.udemy.com/certificate/UC-a13d1b17-6863-4209-8980-25f32ba51815/"
72 | NLP_CERTIFICATION_DESCRIPTION = """
73 | - ✔ This certification helped me:
74 |     - 💠 Master Python-based text file manipulation and processing.
75 |     - 💠 Gain expertise in handling PDF files and extracting information using Python.
76 |     - 💠 Acquire skills in pattern searching using **regular expressions** and employ **spaCy** for efficient tokenization and vocabulary matching (a minimal sketch of this kind of usage follows below).
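
A minimal, hypothetical sketch of the kind of spaCy vocabulary matching taught there (the pipeline name, sample sentence and `ML_PHRASE` rule are my own illustration, not course material):

```python
import spacy
from spacy.matcher import Matcher

# Load a small English pipeline (assumes en_core_web_sm has been downloaded).
nlp = spacy.load("en_core_web_sm")
doc = nlp("Streamlit makes machine learning demos easy to build.")

# Rule-based vocabulary matching: find the phrase "machine learning".
matcher = Matcher(nlp.vocab)
matcher.add("ML_PHRASE", [[{"LOWER": "machine"}, {"LOWER": "learning"}]])

for _, start, end in matcher(doc):
    print(doc[start:end].text)  # -> "machine learning"
```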
77 | """
78 | # --------------------------------------
79 | st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON, layout="wide")
80 | 
81 | st.title("Certifications")
82 | # --------------- HELPER FUNCTIONS -----------------------
83 | 
84 | def certification_section(CERTIFICATION_TITLE, CERTIFICATION_LINK, CERTIFICATION_DESCRIPTION, CERTIFICATION_PIC):
85 | 
86 |     st.subheader(f"[{CERTIFICATION_TITLE}]({CERTIFICATION_LINK})")
87 |     st.write(CERTIFICATION_DESCRIPTION, unsafe_allow_html=True)
88 |     with st.expander("Check Certification"):
89 |         st.image(CERTIFICATION_PIC, width=800)
90 |     st.write('----')
91 | # ----------- CSS, PDF & Profile Pic SETTINGS --------------
92 | 
93 | with open(css_file) as f:
94 |     st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
95 | 
96 | 
97 | # ------ HERO SECTION -----------
98 | 
99 | # ------ CERTIFICATION AWS ML SECTION ---------
100 | certification_section(AWS_ML_CERTIFICATION_TITLE, AWS_ML_CERTIFICATION_LINK, AWS_ML_CERTIFICATION_DESCRIPTION, AWS_ML_CERTIFICATION_PIC)
101 | 
102 | # ------ CERTIFICATION ML SECTION ---------
103 | certification_section(ML_CERTIFICATION_TITLE, ML_CERTIFICATION_LINK, ML_CERTIFICATION_DESCRIPTION, ML_CERTIFICATION_PIC)
104 | 
105 | # ------ CERTIFICATION NN DL SECTION ---------
106 | certification_section(NN_DL_CERTIFICATION_TITLE, NN_DL_CERTIFICATION_LINK, NN_DL_CERTIFICATION_DESCRIPTION, NN_DL_CERTIFICATION_PIC)
107 | 
108 | # ------ CERTIFICATION DL SECTION ---------
109 | certification_section(DL_TUNE_CERTIFICATION_TITLE, DL_TUNE_CERTIFICATION_LINK, DL_TUNE_CERTIFICATION_DESCRIPTION, DL_TUNE_CERTIFICATION_PIC)
110 | 
111 | # ------ CERTIFICATION AWS SECTION ---------
112 | certification_section(AWS_CERTIFICATION_TITLE, AWS_CERTIFICATION_LINK, AWS_CERTIFICATION_DESCRIPTION, AWS_CERTIFICATION_PIC)
113 | 
114 | # ------ CERTIFICATION NLP SECTION ---------
115 | certification_section(NLP_CERTIFICATION_TITLE, NLP_CERTIFICATION_LINK, NLP_CERTIFICATION_DESCRIPTION, NLP_CERTIFICATION_PIC)
116 | 
117 | 
--------------------------------------------------------------------------------
/pages/4_🧪_Personal_Projects.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | 
3 | import streamlit as st
4 | from streamlit_extras.switch_page_button import switch_page
5 | from PIL import Image
6 | 
7 | 
8 | # ------------ PATH SETTINGS ----------
9 | current_dir = Path(__file__).parent.parent
10 | css_file = current_dir / "styles" / "main.css"
11 | 
12 | E2E_PIC_PATH = current_dir / "assets" / "personal_projects" / "e2e.png"
13 | THREE_MODELS_PIC_PATH = current_dir / "assets" / "personal_projects" / "three_models.png"
14 | # ------------ CONSTANTS ----------
15 | PAGE_TITLE = "Personal Projects | Et-tali Mouad"
16 | PAGE_ICON = "🏛"
17 | 
18 | #-------- PERSONAL PROJECTS CONTENT----------
19 | NLP_PROJECT_TITLE = "Dimensionality reduction and clustering of scientific articles/Wikipedia summaries/news (link)"
20 | NLP_PROJECT_LINK = "https://github.com/MouadEttali/NLP-and-Text_Mining"
21 | NLP_PROJECT_KEYWORDS = "NLP, text mining, clustering, dimensionality reduction, interpretability, Word2Vec, GloVe, BERT, RoBERTa"
22 | NLP_PROJECT_STACK = "PCA, t-SNE, UMAP, Autoencoders, KMeans, Spherical KMeans, Factorial KMeans, Hierarchical clustering (Ward, complete-linkage, single-linkage metrics), HDBSCAN, Reduced KMeans, Deep Clustering Network, Deep KMeans"
23 | NLP_PROJECT_DESCRIPTION = """
24 | - ✔ The project consists of two parts:
25 |     - 💠 Part 1: Text Analysis and Clustering without Dimensionality Reduction
26 |         - 🔸 Textual analysis and exploratory data analysis (EDA) of scientific articles / news headlines
27 |         - 🔸 Clustering on the original data using different clustering algorithms
28 |         - 🔸 Benchmarking the results for later comparison
29 |     - 💠 Part 2: Second Analysis and Clustering with Dimensionality Reduction
30 |         - 🔸 Tokenizing the data using Word2Vec, GloVe, BERT, and RoBERTa
31 |         - 🔸 Applying dimensionality reduction techniques (PCA, t-SNE, UMAP, Autoencoders)
32 |         - 🔸 Performing clustering on the reduced-dimensional data using various clustering algorithms.
33 | - 💡 The goal is to gain insights from this data and identify clusters of similar articles/news based on their content, enabling better understanding and organization of this sort of corpus for various purposes.
34 | """
35 | 
36 | NN_PROJECT_TITLE = "Implementation of a neural network for semi-supervised learning to predict MNIST data (link)"
37 | NN_PROJECT_LINK = "https://github.com/MouadEttali/ComputerVision_DeepLearning/tree/main/PseudoLabelingProject"
38 | NN_PROJECT_KEYWORDS = "Neural Networks, Semi-supervised Learning, Multi-class classification"
39 | NN_PROJECT_STACK = "TensorFlow, Keras, scikit-learn, NumPy"
40 | NN_PROJECT_DESCRIPTION = """
41 | - ✔ Implemented a neural network for MNIST data prediction with only **100 labeled images**, using the semi-supervised learning method proposed in the scientific paper "Pseudo-Label: The Simple and Efficient Semi-Supervised Learning Method for Deep Neural Networks".
42 | - ✔ Following the same approach as the article, the project uses pseudo-labels as if they were real labels in the learning process to maximize prediction and classify unlabeled data.
43 | - ✔ The project treats pseudo-labeling as a prerequisite for semi-supervised learning, leveraging both labeled and unlabeled data simultaneously to train the neural network and **achieve higher accuracy during testing.**
44 | - ✔ By incorporating the semi-supervised learning technique, the algorithm offers improved accuracy by effectively utilizing both labeled and unlabeled data in the training process, and achieves **76.95% accuracy.**
45 | """
46 | 
47 | ML_algos_PROJECT_TITLE = "Implementation of machine learning algorithms using numpy from mathematical foundations (link)"
48 | ML_algos_PROJECT_LINK = "https://github.com/MouadEt-tali/From-scratch-MlAlgorithms"
49 | ML_algos_PROJECT_KEYWORDS = "Coding, Machine Learning, First Principles"
50 | ML_algos_PROJECT_STACK = "NumPy, Python"
51 | ML_algos_PROJECT_DESCRIPTION = """
52 | - ✔ I coded a few classical machine learning models from scratch, starting from their basic mathematical fundamentals and then translating that into Python code (a small sketch in the same spirit follows below).
53 | - ✔ Purpose:
54 |     - 💠 Gaining a deeper understanding of, and insight into, the inner workings of these algorithms.
55 |     - 💠 Developing a higher level of understanding regarding their use cases, strengths, weaknesses, and various implementations.
56 | - ✔ Completed algorithms: Linear Regression, Multiple Linear Regression, Logistic Regression, KNN and Support Vector Machine. Continual updates and exploration of other algorithms are in progress.
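
A minimal, hypothetical sketch of the from-scratch style used here (illustrative of the approach, not code copied from the repository):

```python
import numpy as np

def fit_linear_regression(X, y, lr=0.01, epochs=1000):
    # Least-squares linear regression trained with batch gradient descent.
    n_samples, n_features = X.shape
    w, b = np.zeros(n_features), 0.0
    for _ in range(epochs):
        error = X @ w + b - y                       # residuals of the current fit
        w -= lr * (2 / n_samples) * (X.T @ error)   # gradient of MSE w.r.t. w
        b -= lr * (2 / n_samples) * error.sum()     # gradient of MSE w.r.t. b
    return w, b
```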
57 | """
58 | E2E_PIC = Image.open(E2E_PIC_PATH)
59 | E2E_FLASK_PROJECT_TITLE = "End-to-end churn prediction using Flask and AWS (link)"
60 | E2E_FLASK_PROJECT_LINK = "https://github.com/MouadEttali/End_to_END_churn_predictor"
61 | E2E_FLASK_PROJECT_KEYWORDS = "Machine Learning application, Deployment, API"
62 | E2E_FLASK_PROJECT_STACK = "Python, Flask, AWS"
63 | E2E_FLASK_PROJECT_DESCRIPTION = """
64 | - ✔ I trained a model to predict employee attrition based on multiple factors, such as satisfaction level, number of projects, salary...
65 | - ✔ Wrote the REST API and coded a simple UI frontend for this ML project.
66 | - ✔ Deployed it on an AWS EC2 instance.
67 | """
68 | THREE_MODELS_PIC = Image.open(THREE_MODELS_PIC_PATH)
69 | THREE_MODELS_PROJECT_TITLE = "Streamlit machine learning application that contains 3 models for predicting Diabetes, Parkinson's and Heart Disease (link)"
70 | THREE_MODELS_PROJECT_LINK = "https://github.com/MouadEttali/streamlitHerokuApp"
71 | THREE_MODELS_PROJECT_KEYWORDS = "Machine Learning application"
72 | THREE_MODELS_PROJECT_STACK = "Python, Streamlit"
73 | THREE_MODELS_PROJECT_DESCRIPTION = """
74 | - ✔ I trained binary classifiers such as logistic regression and SVM to give an accurate prediction of these 3 diseases:
75 | - ✔ The data contains fields such as insulin level, age, chest pain levels ...
76 | - ✔ Performances:
77 |     - 💠 Diabetes data: 89% recall, 91% precision
78 |     - 💠 Parkinson's data: 85% recall, 89% precision
79 |     - 💠 Heart Disease data: 91% recall, 96% precision
80 | - ✔ Deployed the application using Heroku
81 | """
82 | # --------------------------------------
83 | st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON, layout="wide")
84 | 
85 | st.title("Personal Projects")
86 | # --------------- HELPER FUNCTIONS -----------------------
87 | 
88 | def personal_project_section(PROJECT_TITLE, PROJECT_LINK, PROJECT_KEYWORDS, PROJECT_STACK, PROJECT_DESCRIPTION):
89 | 
90 |     st.subheader(f"[{PROJECT_TITLE}]({PROJECT_LINK})")
91 |     st.write('---')
92 |     st.write(f'''**Keywords:** {PROJECT_KEYWORDS}''', unsafe_allow_html=True)
93 | 
94 |     st.write(PROJECT_DESCRIPTION, unsafe_allow_html=True)
95 |     st.write(f'''**Technologies Used:** {PROJECT_STACK}''', unsafe_allow_html=True)
96 |     st.write('\n')
97 | # ----------- CSS, PDF & Profile Pic SETTINGS --------------
98 | 
99 | with open(css_file) as f:
100 |     st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
101 | 
102 | 
103 | # ------ HERO SECTION -----------
104 | 
105 | # ------ Project 1 SECTION ---------
106 | personal_project_section(NLP_PROJECT_TITLE, NLP_PROJECT_LINK, NLP_PROJECT_KEYWORDS, NLP_PROJECT_STACK, NLP_PROJECT_DESCRIPTION)
107 | st.write('----')
108 | # ------ Project 2 SECTION ---------
109 | # personal_project_section(PROJECT_TITLE,PROJECT_KEYWORDS,PROJECT_STACK,PROJECT_DESCRIPTION)
110 | personal_project_section(NN_PROJECT_TITLE, NN_PROJECT_LINK, NN_PROJECT_KEYWORDS, NN_PROJECT_STACK, NN_PROJECT_DESCRIPTION)
111 | st.write('----')
112 | 
113 | #------ Project 3 SECTION
114 | personal_project_section(ML_algos_PROJECT_TITLE, ML_algos_PROJECT_LINK, ML_algos_PROJECT_KEYWORDS, ML_algos_PROJECT_STACK, ML_algos_PROJECT_DESCRIPTION)
115 | st.write('----')
116 | 
117 | #------ Project 4 SECTION
118 | personal_project_section(E2E_FLASK_PROJECT_TITLE, E2E_FLASK_PROJECT_LINK, E2E_FLASK_PROJECT_KEYWORDS, E2E_FLASK_PROJECT_STACK, E2E_FLASK_PROJECT_DESCRIPTION)
119 | with st.expander("**Preview of deliverables:** "):
120 |     st.image(E2E_PIC, width=1000)
121 | st.write('----')
122 | 
123 | #------ Project 5 SECTION
124 | personal_project_section(THREE_MODELS_PROJECT_TITLE, THREE_MODELS_PROJECT_LINK, THREE_MODELS_PROJECT_KEYWORDS, THREE_MODELS_PROJECT_STACK, THREE_MODELS_PROJECT_DESCRIPTION)
125 | with st.expander("**Preview of deliverables:** "):
126 |     st.image(THREE_MODELS_PIC, width=1000)
127 | st.write('----')
--------------------------------------------------------------------------------
/pages/3_⚒️_Professional_Experiences.py:
--------------------------------------------------------------------------------
1 | from pathlib import Path
2 | 
3 | import streamlit as st
4 | from streamlit_extras.switch_page_button import switch_page
5 | from PIL import Image
6 | 
7 | 
8 | # ------------ PATH SETTINGS ----------
9 | current_dir = Path(__file__).parent.parent
10 | css_file = current_dir / "styles" / "main.css"
11 | AQSONE_PIC_PATH = current_dir / "assets" / "work" / "aqsone" / "aqsone.png"
12 | ASTEK_PIC_PATH = current_dir / "assets" / "work" / "astek" / "astek.png"
13 | ALEXSYS_PIC_PATH = current_dir / "assets" / "work" / "alexsys" / "alexsys.png"
14 | DADVISOR_PIC_PATH = current_dir / "assets" / "work" / "digitaladvisor" / "digitaladvisor.png"
15 | preview_aqsone = [f"""{current_dir}/assets/work/aqsone/{example}""" for example in ['example_1.png', 'example_2.png']]
16 | preview_astek = [f"""{current_dir}/assets/work/astek/{example}""" for example in ['navigation.png', 'activity.png', 'database.png', 'sensor.png', 'data.png']]
17 | preview_alexsys = [f"""{current_dir}/assets/work/alexsys/{example}""" for example in ['home.png', 'auth.png', 'add_person.png']]
18 | 
19 | # ------------ CONSTANTS ----------
20 | PAGE_TITLE = "Work Experiences | Et-tali Mouad"
21 | PAGE_ICON = "🏛"
22 | 
23 | #-------- WORK EXPERIENCE CONTENT----------
24 | COMMUN_ROLE = "Data Scientist"
25 | AQSONE_PIC = Image.open(AQSONE_PIC_PATH)
26 | AQSONE_COMPANY = "Aqsone"
27 | AQSONE_PERIOD = "09/2022 - Present"
28 | AQSONE_DESCRIPTION = """
29 | - ► Nexans mission:
30 |     - 💠 Correction of inconsistencies in historical purchasing and supplier data. Enrichment of missing data impacting +440 M€ in materials thanks to deduplication and ML clustering methods (a toy sketch of this kind of deduplication appears below).
31 | - ► Allegro Musique mission:
32 |     - 💠 Participated in the development of an ML solution that recommends music teachers to students looking for lessons. The solution is built entirely on AWS and uses several services **(SageMaker, Lambda, API Gateway, S3, RDS)**.
33 | - ► Collaborated on the creation of a digital costing solution that predicts the cost of clothing items from their image and description, based on Convolutional Neural Networks and Transformers.
34 | - ► Development of a 360° Procurement solution using Python, AWS MySQL and Google Data Studio, with interactive dashboards including forecasts, spend analysis, supplier audits, CO2 emissions and more.
35 | - ► Participated in the creation of a Succession Planning solution that uses Machine Learning for optimal successor choice, using D3.js for visualizations.
36 | - ► Commercial work: participated in the development of multiple proofs of concept to demonstrate to prospects and clients for business development purposes.
37 | - ► Internal work:
38 |     - 💠 Alongside 2 other data scientists and an Agile coach, we handle the management of different courses and certifications for the rest of the company.
39 |     - 💠 Organization of monthly presentations about the state of the art in the fields of data, AI and ML, as well as introducing new tools to our collaborators.
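
A toy, hypothetical sketch of record deduplication via clustering (the supplier names and parameters are illustrative, not client data):

```python
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.cluster import DBSCAN

# Toy supplier records containing near-duplicates.
names = ["Nexans France", "NEXANS France SA", "Acme Metals", "ACME Metals Ltd"]

# Character n-grams are robust to casing, spacing and small spelling variants.
X = TfidfVectorizer(analyzer="char_wb", ngram_range=(2, 3)).fit_transform(names)

# Cosine-distance DBSCAN groups near-duplicate records into one cluster each.
labels = DBSCAN(eps=0.5, min_samples=1, metric="cosine").fit_predict(X)
print(labels)  # e.g. [0 0 1 1] -> two deduplicated entities
```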
40 | """
41 | 
42 | ASTEK_PIC = Image.open(ASTEK_PIC_PATH)
43 | ASTEK_COMPANY = "Astek Research Lab"
44 | ASTEK_PERIOD = "02/2022 - 08/2022"
45 | ASTEK_DESCRIPTION = """
46 | - ► Implemented an XGBoost model that uses a user's tactile, orientation, and acceleration phone-usage data to determine whether they are the owner of the phone or an impostor.
47 | - ► Developed an application with React and MongoDB that collects training data through built-in sensor APIs.
48 | - ► The model achieves 94% accuracy and an F1 score of 88% on 7 users, and can also identify phones used by the same user.
49 | - ► Wrote a report on the work, detailing the thinking and reflections on the subject, the experiments considered and carried out, and the conclusions regarding the scientific issues and uncertainties raised.
50 | - ► The app I developed allowed the user to create their own experiments by specifying which sensors are used and the types of activities, such as reading, walking, or comparing images. For activities with data, you can add text or images. Finally, it was deployed on Heroku with Atlas Cloud (MongoDB) as the database.
51 | """
52 | 
53 | ALEXSYS_PIC = Image.open(ALEXSYS_PIC_PATH)
54 | ALEXSYS_COMPANY = "ALEXSYS SOLUTIONS"
55 | ALEXSYS_PERIOD = "12/2020 - 08/2021"
56 | ALEXSYS_DESCRIPTION = """
57 | - ► Mission ONCF (Office National des Chemins de Fer du Maroc):
58 |     - 💠 Implemented a classification and optical character recognition (OCR) model using YOLOv4 on scans of legal documents in Arabic. The developed system reduces the processing time of each document to the detection time, allowing the batch detection of 1000 documents in 5 minutes (3.33 documents per second).
59 | - ► Mission TotalEnergies:
60 |     - 💠 Development of a license plate detection and reading application for natural gas distribution trucks. This system has reduced the time it takes for trucks to enter the gas distribution center by an average of 2 minutes and 53 seconds.
61 | """
62 | 
63 | DADVISOR_PIC = Image.open(DADVISOR_PIC_PATH)
64 | DADVISOR_COMPANY = "DigitalAdvisor"
65 | DADVISOR_PERIOD = "01/2020 - 08/2020"
66 | DADVISOR_DESCRIPTION = """
67 | - ► Implemented a multiclass classification model to predict **the types of failures of industrial machines** (86% accuracy, 79% F1 score).
68 | - ► Implemented a regression model to predict **the Remaining Useful Life of industrial machines** using the algorithms XGBoost, Random Forest and Support Vector Machine (a minimal sketch of this approach appears below).
69 | - ► Participated in the development of a real-time monitoring application for industrial machines using Flask.
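
A minimal, hypothetical sketch of the Remaining Useful Life regression approach (the features and data are synthetic, not from the client project):

```python
import numpy as np
from sklearn.ensemble import RandomForestRegressor
from sklearn.model_selection import train_test_split
from sklearn.metrics import mean_absolute_error

# Synthetic sensor features per machine cycle: temperature, vibration, pressure.
rng = np.random.default_rng(42)
X = rng.normal(size=(1000, 3))
y = 200 - 50 * X[:, 1] + rng.normal(scale=5.0, size=1000)  # toy remaining-useful-life target

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
model = RandomForestRegressor(n_estimators=200, random_state=42).fit(X_train, y_train)
print("MAE (cycles):", mean_absolute_error(y_test, model.predict(X_test)))
```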
70 | """
71 | # --------------------------------------
72 | st.set_page_config(page_title=PAGE_TITLE, page_icon=PAGE_ICON, layout="wide")
73 | 
74 | st.title("Professional Experiences")
75 | # --------------- HELPER FUNCTIONS -----------------------
76 | 
77 | def work_experience_section(PIC, ROLE, COMPANY, PERIOD, WORK_DESCRIPTION):
78 | 
79 |     st.image(PIC, width=150)
80 |     st.write(f"**{ROLE} | {COMPANY}**")
81 |     st.write(f"{PERIOD}")
82 |     st.write(WORK_DESCRIPTION, unsafe_allow_html=True)
83 | 
84 | 
85 | # ----------- CSS, PDF & Profile Pic SETTINGS --------------
86 | 
87 | with open(css_file) as f:
88 |     st.markdown("<style>{}</style>".format(f.read()), unsafe_allow_html=True)
89 | 
90 | 
91 | # ------ HERO SECTION -----------
92 | 
93 | # ------ AQSONE SECTION ---------
94 | work_experience_section(AQSONE_PIC, COMMUN_ROLE, AQSONE_COMPANY, AQSONE_PERIOD, AQSONE_DESCRIPTION)
95 | with st.expander("**Preview of deliverables:** "):
96 |     images = [Image.open(image) for image in preview_aqsone]
97 |     cols = st.columns(len(images))
98 |     for col, image in zip(cols, images):
99 |         with col:
100 |             st.image(image, width=600)
101 | st.write('----')
102 | 
103 | # ------ ASTEK SECTION ---------
104 | work_experience_section(ASTEK_PIC, COMMUN_ROLE, ASTEK_COMPANY, ASTEK_PERIOD, ASTEK_DESCRIPTION)
105 | images = [Image.open(image) for image in preview_astek]
106 | image_captions = ['Navigation between pages', 'Adding an activity to the experiment', 'MongoDB Cloud Database', 'Adding a sensor to the experiment', 'Adding data and time limit to an activity']
107 | with st.expander("**Preview of deliverables:** "):
108 |     cols = st.columns(3, gap="large")
109 |     for i, image in enumerate(images):
110 |         with cols[i % 3]:
111 |             st.image(image, caption=image_captions[i], width=400)
112 | st.write('----')
113 | 
114 | #------ Alexsys Solutions SECTION
115 | work_experience_section(ALEXSYS_PIC, COMMUN_ROLE, ALEXSYS_COMPANY, ALEXSYS_PERIOD, ALEXSYS_DESCRIPTION)
116 | with st.expander("**Preview of deliverables:** "):
117 |     images = [Image.open(image) for image in preview_alexsys]
118 |     cols = st.columns(len(images))
119 |     for col, image in zip(cols, images):
120 |         with col:
121 |             st.image(image, width=400)
122 | st.write('----')
123 | #------ Digital Advisor SECTION
124 | work_experience_section(DADVISOR_PIC, COMMUN_ROLE, DADVISOR_COMPANY, DADVISOR_PERIOD, DADVISOR_DESCRIPTION)
125 | 
126 | 
127 | st.write('----')
--------------------------------------------------------------------------------