├── .gitignore ├── LICENSE ├── README.md ├── contributing.md ├── demo01 ├── README.md ├── bshr_loop.py ├── src │ ├── README.md │ ├── example.env │ ├── main.py │ ├── settings.py │ ├── system01_brainstorm.py │ ├── system02_hypothesize.py │ ├── system03_satisfice.py │ ├── system04_refine.py │ └── utils.py ├── system01_brainstorm_search_queries.txt ├── system02_hypothesize.txt ├── system03_satisficing_check.txt ├── system04_spr_refine.txt ├── technique02_brainstorm_search_hypothesize_refine.py └── wiki_test.py ├── demo02 ├── Building_David_Shapiro_AI_Body_of_Knowledge_Part_1_Collect_YouTube_Videos.ipynb ├── Building_David_Shapiro_AI_Body_of_Knowledge_Part_2_YouTube_Speech_to_Text.ipynb ├── Building_David_Shapiro_AI_Body_of_Knowledge_Part_3_Upsert_YouTube_Data.ipynb ├── Building_David_Shapiro_AI_Body_of_Knowledge_Part_4_Query_Data.ipynb ├── readme.md └── whisper_chunks │ ├── -3WpqPZgWAk_large.txt │ ├── -H7JM0A7xRE_large.txt │ ├── -kVoVUe9rQw_large.txt │ ├── -s38ihwkShw_large.txt │ ├── 01hDD9V4hMU_large.txt │ ├── 1GN8zK9wgcs_large.txt │ ├── 1KdgN85_M6U_large.txt │ ├── 24srC931Yxo_large.txt │ ├── 2fNT4WZwLfM_large.txt │ ├── 2xNzB7xq8nk_large.txt │ ├── 2zBIwcYYYH8_large.txt │ ├── 3U3ZsPjcpEY_large.txt │ ├── 3Xa3lu00wfg_large.txt │ ├── 3cYaMWgRyXc_large.txt │ ├── 3sbqTXKlLws_large.txt │ ├── 4kutenB9Er0_large.txt │ ├── 54GYdT2bvOQ_large.txt │ ├── 5OLIJvMYwsc_large.txt │ ├── 5T4x_0o5qRQ_large.txt │ ├── 5UmAsFe7zQg_large.txt │ ├── 5ZImuCe6P2E_large.txt │ ├── 7HcQ87RFN5k_large.txt │ ├── 7WwqV0cBoGA_large.txt │ ├── 7ZTdoQPRbw8_large.txt │ ├── 8Jj-l95Ty0o_large.txt │ ├── 8izTG3yxoak_large.txt │ ├── 8phmHSUDzRg_large.txt │ ├── 9E4sq7yAet0_large.txt │ ├── 9OPap5UIhrA_large.txt │ ├── 9qq6HTr7Ocw_large.txt │ ├── A1BFPo_gaEU_large.txt │ ├── AGkKblnDQhM_large.txt │ ├── AWb05fizbiE_large.txt │ ├── A_BL_pu4Gtk_large.txt │ ├── Ahh92qtRwos_large.txt │ ├── BbeOB2i6XqQ_large.txt │ ├── BtP3zDvUfv8_large.txt │ ├── CkOolFpHWM8_large.txt │ ├── Dr2LOYrvnRM_large.txt │ ├── EAger7jOrsA_large.txt │ ├── EApYP0SDctA_large.txt │ ├── EG0wQRsXLi4_large.txt │ ├── EkgY90QXS2k_large.txt │ ├── EwJ1534Gy6g_large.txt │ ├── EzyNxcFUWgI_large.txt │ ├── F0lIt__jkKw_large.txt │ ├── FBvrZng0Nik_large.txt │ ├── FIB6BZwNm4c_large.txt │ ├── Fsa1QuLIXI4_large.txt │ ├── FuaLGGAFDlA_large.txt │ ├── G0WTlifF-JQ_large.txt │ ├── GD4SXrB9TYY_large.txt │ ├── Ga8ijBaUXc4_large.txt │ ├── GkdGOVAz1eI_large.txt │ ├── GnOh5mSlWCw_large.txt │ ├── GvM6jmmAmfA_large.txt │ ├── H1DcbSZqMF0_large.txt │ ├── H5t9LwkuFXM_large.txt │ ├── HBONmpBAdpE_large.txt │ ├── HK8Yv5w-iVk_large.txt │ ├── HNtKVrQMNZs_large.txt │ ├── I7hJggz41oU_large.txt │ ├── IFXT5uYRxjo_large.txt │ ├── IsKk_F3DCVs_large.txt │ ├── J77GcB706PA_large.txt │ ├── Jh4neNM0oZ0_large.txt │ ├── JxhG3H2-EIE_large.txt │ ├── KNi8-va2C-g_large.txt │ ├── KYsOgziCLac_large.txt │ ├── Kcj04uQgwxc_large.txt │ ├── KpmEg71HO0s_large.txt │ ├── KuvQ3qIGnA0_large.txt │ ├── L-s01aioiz4_large.txt │ ├── LlJJrNuArho_large.txt │ ├── M5eQwl4YmGU_large.txt │ ├── MLItFQW_iAA_large.txt │ ├── MSVDsgOKtyo_large.txt │ ├── N8p6u1OtARs_large.txt │ ├── NC7990PmDfM_large.txt │ ├── NkA_bSE3N1k_large.txt │ ├── O249nnUDCgw_large.txt │ ├── OVD_3F9LD3E_large.txt │ ├── Oawp_McdHzU_large.txt │ ├── OzAv2ZmUiR0_large.txt │ ├── P-8TQXDbllU_large.txt │ ├── PgwpqjiKkoY_large.txt │ ├── Q0P-RcgBY_s_large.txt │ ├── Q1IntjPdW64_large.txt │ ├── QGLF3UbDf7g_large.txt │ ├── QotF4TgnDhU_large.txt │ ├── Qty50O6LMNk_large.txt │ ├── R0wBMDoFkP0_large.txt │ ├── README.md │ ├── SCZcaW6VGX8_large.txt │ ├── T8ktpayOBWE_large.txt │ ├── TQyxWIBFCRM_large.txt │ 
├── TkxroMCmpDw_large.txt │ ├── TtvB75yRbto_large.txt │ ├── TuACYIvPmIc_large.txt │ ├── UaiP9MvCAJ8_large.txt │ ├── Upae72aFvdU_large.txt │ ├── UunRZEIpIRQ_large.txt │ ├── V-suQq_YvDY_large.txt │ ├── VHkHw-NQ5Wk_large.txt │ ├── VITNpuePAqs_large.txt │ ├── VSeGgDNONaY_large.txt │ ├── W2vGDqyDUfQ_large.txt │ ├── Wk7CN-6ppEA_large.txt │ ├── XEE17Z8hrto_large.txt │ ├── XX6K85kPxpU_large.txt │ ├── XXbCiX0Owv4_large.txt │ ├── XwBrlbrLsH0_large.txt │ ├── Y1gBosXlM8c_large.txt │ ├── YDfjmiTAZMk_large.txt │ ├── YRIQ-61PwB4_large.txt │ ├── YXQ6OKSvzfc_large.txt │ ├── YfjaspSWI0c_large.txt │ ├── YhSR9wBOdFw_large.txt │ ├── YhYW2YIAQ5c_large.txt │ ├── YjdmYCd6y0M_large.txt │ ├── YwUy5meLnNY_large.txt │ ├── ZX5MHNvjw7o_large.txt │ ├── Z_cJIedDOCw_large.txt │ ├── a4F37zGNwyc_large.txt │ ├── aCvzpM_TVeE_large.txt │ ├── aq7fnqzeaPc_large.txt │ ├── b37uygvPq0k_large.txt │ ├── c07eWV6Pois_large.txt │ ├── c3aiCrk0F0U_large.txt │ ├── c9QE9oIvlEU_large.txt │ ├── ck79iEyWyQQ_large.txt │ ├── dD7PFH172BQ_large.txt │ ├── e-Fh3Zw_7YU_large.txt │ ├── eafRE74JGZ8_large.txt │ ├── ekn5Tcqgs7o_large.txt │ ├── es8e4SEuvV0_large.txt │ ├── es9jZQ8QqNA_large.txt │ ├── g-d-Ny4KGiE_large.txt │ ├── gQuLRdBYn8Q_large.txt │ ├── gbeQUha_stI_large.txt │ ├── hSPGSoi59sQ_large.txt │ ├── iQngDalZvS0_large.txt │ ├── izRrTU-PUCQ_large.txt │ ├── jMMpgtHhZQY_large.txt │ ├── jYXFllZkhyg_large.txt │ ├── jkTlGP_2dKo_large.txt │ ├── kl0TrLkNUfw_large.txt │ ├── l7XrSB6aWEQ_large.txt │ ├── mohG7fWglww_large.txt │ ├── n1ZXfhk64Mk_large.txt │ ├── n8X2h8Mg3WE_large.txt │ ├── nDOmoIFx8Ww_large.txt │ ├── nTobHOyLvRU_large.txt │ ├── nvr9C6B1ask_large.txt │ ├── oILYjtbmLgc_large.txt │ ├── oVP_aB5rJL8_large.txt │ ├── ozfiYRES9GQ_large.txt │ ├── p3_9h6kXUhk_large.txt │ ├── piRMk2KIx2o_large.txt │ ├── pkzHHaAJRqA_large.txt │ ├── pvc_XDDrwgc_large.txt │ ├── rV-qyM0hHQY_large.txt │ ├── s0M6y5sCUVM_large.txt │ ├── sJLB2EIQUvQ_large.txt │ ├── sY_5irZNZ0o_large.txt │ ├── sfD-rs5lmEs_large.txt │ ├── si58om5_YBI_large.txt │ ├── skozI33D3t4_large.txt │ ├── tD_-KCcVCsk_large.txt │ ├── tZGNoJhHtTA_large.txt │ ├── tmBX3Y9FyOM_large.txt │ ├── u-MfZ-DHrtg_large.txt │ ├── ujwZ3bRfw9U_large.txt │ ├── v2gD8BHOaX4_large.txt │ ├── vciliX10Ex4_large.txt │ ├── xCD8-r4W9To_large.txt │ ├── xIodby52_Tk_large.txt │ ├── x_GlmdrGuXo_large.txt │ ├── yVaRZI6nU8M_large.txt │ ├── zAvPf1GNhHs_large.txt │ ├── ziF0MkExboE_large.txt │ ├── znPNQcTgLO8_large.txt │ ├── zrW3K_K_kuo_large.txt │ ├── zwLsnsp7FF4_large.txt │ └── zx2wHkeSM_E_large.txt └── transcripts ├── 1000 Subs Livestream! AI alignment, writing fiction, LLMs, and more!.txt ├── 3 Predictions for the future of AI jobs.txt ├── 5 Reasons GPT-4 Will Be Disappointing.txt ├── 5 Tips and Misconceptions about Finetuning GPT-3.txt ├── 57% of people Machines will be sentient one day (10% say it's already happened!).txt ├── 8 Post-Singularity Lifestyles + The Latest and Best AI Tools + Functional Machine Sentience!.txt ├── ACE Framework Overview and Intro Autonomous AI Agents!.txt ├── ACE Paper is Published! Repo tour! Get involved!.txt ├── AGI Alignment Experiments INSTRUCT vs Foundation and Agent Models.txt ├── AGI Poll results AGI not dangerous, might destroy lots of jobs.txt ├── AGI Revolution How Businesses, Governments, and Individuals can Prepare.txt ├── AGI Unleashed Game Theory, Byzantine Generals, and the Heuristic Imperatives.txt ├── AGI Within 12 Months! 
Rumors, Leaks, and Trends - Announcing Open MURPHIE robotic platform.txt ├── AGI within 18 months explained with a boatload of papers and projects.txt ├── AI & Evolution How the rise of AI might shape the future of humanity.txt ├── AI + Blockchain + DAO = Success! How Decentralized Autonomous Organizations will control EVERYTHING!.txt ├── AI Companions Social Impact - How tech like ChatGPT, virtual companions, and robots will change us.txt ├── AI Insights Forum Closed Door Senate Meeting with Tech Execs.txt ├── AI Jobpocalypse How tech like ChatGPT will impact your life by 2030 (and what to do about it).txt ├── AI Layoffs are Coming Gizmodo Fires Editors.txt ├── AI News CoALA, Theory of Mind, Artificial Neurons, Swarm Intelligence, and Neural Convergence.txt ├── AI Startup Crash Course - Ep. 1 - Founder Team.txt ├── AI Startup Crash Course - Ep. 2 - Experiment, Experiment, Experiment.txt ├── AI Startup Crash Course - Ep. 3 - Network, Network, Network.txt ├── AI Winter is Not Coming Where in the Gartner Hype Cycle Are we What comes next (It gets messy!).txt ├── Accelerating Science with AI Quickly Read Every Paper and Get Key Insights in Bulk.txt ├── Alignment Collectivist vs Egocentric Agent Models (I vs we) Can we create a unique mind.txt ├── Alignment Research GPT-3 vs GPT-NeoX - which one understands AGI alignment better.txt ├── America to hit 82% Unemployment I have the data to back it up.txt ├── Anna Bernstein - Professional Prompt Engineer - We don't have to forfeit the realm of creativity.txt ├── Answer complex questions from an arbitrarily large set of documents with vector search and GPT-3.txt ├── Are LaMDA or GPT-3 sentient No, but....txt ├── Are chatbots lying to us This is worse than you think..txt ├── AutoMuse Announcement & Signup.txt ├── AutoMuse ChatGPT local Python GUI (TKINTER) - Now it can read my entire novel and do QA!.txt ├── AutoMuse Scene Simulation is Working! GPT-3 Finetuned to write sequences of fiction.txt ├── AutoMuse Synopsis GAN (Generative Adversarial Network) for self-improving synopsis generators!.txt ├── AutoMuse Synopsis and Plot Generator Walkthrough.txt ├── AutoMuse Working on Character and Scene Simulations (GPT-3 Finetuning Project).txt ├── AutoMuse Writing a Novel with GPT-3 as a Plotter pt.1 (decompose the problem into steps).txt ├── AutoMuse3 Generate fiction with text-based simulation and cognitive architecture for characters.txt ├── Automatic Literature Review with GPT-3 - I embedded and indexed all of arXiv into a search engine!.txt ├── Automating Science with GPT-4 - attempting (and failing) to perform autonomous literature review.txt ├── Axiomatic Alignment A critical component to Utopia and the Control Problem.txt ├── BOMBSHELL Mustafa Suleyman AI will reshuffle society.txt ├── Beyond Vector Search Knowledge Management with Generative AI.txt ├── Billion-dollar GPT-3 startup! Fix education with an expert tutor chatbot!.txt ├── Bing vs Bard Who will win Google or Microsoft A breakdown and analysis of the recent news.txt ├── Biological Immortality by 2030 Social & Economic Implications + Some Predictions!.txt ├── Can GPT-3 generate training data Short answer Yes! Here's why that's a legit methodology....txt ├── ChatGPT - Good, Bad, Indifferent. 
What will it change What does this mean.txt ├── ChatGPT API QA Chatbot ep 1 - Data Prep [Automating Regenerative Science].txt ├── ChatGPT API with Salience and Anticipation of User Needs Towards a fully autonomous assistant.txt ├── ChatGPT SYSTEM Prompt Engineering (Deep Dive).txt ├── ChatGPT as an Interpreter Introducing the KB Microservice for autonomous AI entities.txt ├── ChatGPT for Absolute Beginners - What is it and how does it work.txt ├── ChatGPT was lobotomized for coding and GitHub Copilot is broken... so I made my own!.txt ├── ChatGPT, Bing, and Google How our relationship with information is changing.txt ├── ChatGPT4 - Sparse Priming Representations, Hierarchical Memory Consolidation, and Implied Cognition!.txt ├── Chatbot with INFINITE MEMORY using OpenAI & Pinecone - GPT-3, Embeddings, ADA, Vector DB, Semantic.txt ├── Co-writing flash fiction with GPT-3.txt ├── Coding Raven's Encyclopedia Service (v.1).txt ├── Cognitive AI Lab Community Update + consensus, books, and my next steps.txt ├── Cognitive AI Lab Podcast #1 - Decentralized Democracy and AI Companions.txt ├── Cognitive AI Lab Podcast 2022-09-21 - Decentralized AI, DAOs, and AI art.txt ├── Cognitive Architecture - April 28 - Salience, Cognitive Control, Task Management, Modular Design.txt ├── Concepts in Neuroscience and Cognition - Deficits of GPT-3 and the path to AGI and ACE.txt ├── Convergence and acceleration towards AGI (or Artificial Cognitive Entities).txt ├── Core Objective Functions vs Thanos Logic.txt ├── Could a machine ever be fully sentient or conscious A deep dive on philosophy, quantum mechanics....txt ├── Cover Letter & Professional Objective Generator with GPT-3.txt ├── Cyberpunk Dystopia Why it's coming and how we can avoid it.txt ├── DALLE2 Style Tags Tutorial - Elven archer in a sunny forest with different tags.txt ├── DIY ChatGPT Enhancing RAVEN's long-term memories and starting to work on self-reflection.txt ├── Decreasingly Verbose Alice in Wonderland - down to 55 characters!.txt ├── DeepMind, OpenAI, Microsoft, Oxford Global AI Research & Regulation is Coming.txt ├── Don't Use MemGPT!! This is way better (and easier)! Use Sparse Priming Representations!.txt ├── Doomerism, Denialism, and Optimism. Introducing the GATO Framework. AI Avengers... Assemble!!.txt ├── Elon Musk xAI Maximize Understanding of the Universe [This is the best news all year].txt ├── Encyclopedia Service Demo (v.1).txt ├── Energy Hyper-Abundance Solar, Fusion, Geopolitics, & AI.txt ├── Escaped Sapiens David Shapiro on AI alignment, safety, and the future of humanity.txt ├── FDA Approves AI Orphan Drug.txt ├── Fine-tuning GPT-3 for benevolent and trustworthy AGI.txt ├── Fine-tuning GPT-3 to generate questions about anything.txt ├── Finetune GPT-3 to write a coherent novel - Part 4 (success! with minor bugs...).txt ├── Finetune GPT-3 to write a novel - Part 3 (IT WORKS!!!) 
...at least a little bit.txt ├── Finetune GPT-3 to write an entire coherent novel (part 1).txt ├── Finetune GPT-3 to write an entire coherent novel (part 2).txt ├── Finetune a perfect email generator in GPT-3 - take any input and generate a great email.txt ├── Finetune multiple cognitive tasks with GPT-3 on medical texts (and reduce hallucination).txt ├── Finetuning GPT-3 101 Augmenting Training Data.txt ├── Finetuning GPT-3 101 Synthesizing Training Data.txt ├── Finetuning GPT-3 101 Using Your Finetuned Model.txt ├── Finetuning GPT-3 to be a master tutor that can handle any topic and hostile students.txt ├── Finetuning a Creative Writing Coach in GPT-3 - Part 1.txt ├── Finetuning a Creative Writing Coach in GPT-3 - Part 2.txt ├── First look at ChatGPT API - the age of Autonomous AI begins TODAY! Cognitive Architectures ahoy!.txt ├── First look! ChatGPT-4 has 32,000 tokens! Continuing work on my ASCENSION novel with the latest!.txt ├── Fixing Democracy with AI - Generating Diverse Perspectives for Consensus and Compromise.txt ├── Fixing goldfish memory with GPT-3 and external sources of information in a chatbot - part 1.txt ├── Fixing goldfish memory with GPT-3 and external sources of information in a chatbot - part 2.txt ├── Forever Jobs AGI and Singularity Proof Occupations (plus a few that SHOULD go away!).txt ├── From zero to finetuned model in 1 hour with GPT-3. Generate a movie script from any premise!.txt ├── GPT Masterclass 4 Years of Prompt Engineering in 16 Minutes.txt ├── GPT Prompt Strategy Latent Space Activation - what EVERYONE is missing!.txt ├── GPT-3 Marketing Aid Chatbot with Long Term Memory (easiest way).txt ├── GPT-3 Working Session Finetune an information companion chatbot in 30 minutes (RESEARCH ONLY).txt ├── GPT-3 for Writing Dialog.txt ├── GPT-5 Rumors and Predictions - It's about to get real silly.txt ├── GPT3 & Finetuning the Core Objective Functions - a deep dive.txt ├── Generate 300 BILLION story synopses with GPT-3.txt ├── Generate a Cover Letter with a quick and simple GPT-3 Chatbot!.txt ├── Generating Training Data for Core Objective Functions.txt ├── Generative AI for CEOs How to think about AI today, and how to get the most ROI from it.txt ├── Generative AI for Product Owners The Rise of Polymorphic Applications!.txt ├── Get Started with Raven AGI.txt ├── Get awesome YouTube chapters with GPT-3 automatically in 20 seconds.txt ├── Global AI news you might have missed, plus using AI for automatically finding consensus.txt ├── How I stay up to date on the latest AI science news.txt ├── How Will Super Alignment Work Challenges and Criticisms of OpenAI's Approach to AGI Safety & X-Risk.txt ├── How close are we to building a real Westworld A look at the hardware, software, and social impact.txt ├── How could AGI break out of the lab Experts weigh in!.txt ├── How do we pay for UBI The answer is not what you think....txt ├── How to create synthetic datasets with GPT-3.txt ├── How will we know when we've invented AGI How will we know it is complete.txt ├── I built a thinking machine. Happy birthday, ACE!.txt ├── I built an AI doctor with ChatGPT - Full Clinical Experience.txt ├── I needed SQLITE but for vectors so I wrote it myself. Now it's on PyPI - introducing VDBLITE.txt ├── I summarized the EU AI Act so you can ask ChatGPT about it easily.txt ├── I took down my videos and code that could write novels. 
I'm glad I did..txt ├── I trained a DALLE chatbot to help you craft DALLE prompts!.txt ├── I'm moving away from Artificial General Intelligence and towards Artificial Cognition. Here's why..txt ├── I'm switching to Open Source LLM models! Introducing Forefront AI for GPT-J and finetuning.txt ├── I'm taking a short break from research and YouTube.txt ├── Implementing Natural Language Cognitive Architecture with GPT-3 and the nexus concept.txt ├── Increasingly Verbose Bot with GPT-3 - Expand any word or phrase into a whole paragraph.txt ├── Indexing Wikipedia with SOLR.txt ├── Inflation Reduction Act of 2022 summarized with AI - see final result in description!.txt ├── Introducing AutoMuse ChatGPT - Working on Temporal Hierarchical Memories for AGI & ACE systems.txt ├── Introducing Benevolent AGI - Autonomously Pursuing Heuristic Imperatives and Upholding Human Rights!.txt ├── Introducing the RAVEN MVP - a general purpose AI companion (with a live DEMO).txt ├── Is AGI actually God Not quite as spicy of a take as you might think....txt ├── It's alive! The first 3 microservices are up and running!.txt ├── LangChain for LLMs is... basically just an Ansible playbook.txt ├── Learn AI A Step By Step Guide (At least, this is how I did it) This is the way....txt ├── Let's build an ACOG (artificial cognitive entity) - Part 1.txt ├── Let's build an ACOG (artificial cognitive entity) - Part 2.txt ├── Let's build an ACOG (artificial cognitive entity) - Part 3.txt ├── Let's build an ACOG (artificial cognitive entity) - Part 4.txt ├── Let's build an ACOG (artificial cognitive entity) - Part 5 preparing to act.txt ├── Levels of Machine Autonomy (Roadmap to AGI).txt ├── Life under UBI Work, hobbies, and wellbeing without work.txt ├── MARAGI Overview, Architecture, and Roadmap (August 2022).txt ├── MARAGI work Starting on the Conductor Microservice.txt ├── Many of you have asked for it Join my new research Discord! Link in description.txt ├── Mastering the Crawl Walk Run Fly Model of AI Transformation.txt ├── Max Tegmark Language Models Understand Time and Space.txt ├── Meta AI LM-Infinite - Massive LLM improvement!.txt ├── Metaprompting with GPT-3 (getting GPT-3 to write its own prompts) RECURSIVE NLP PROGRAMMING!!.txt ├── Metaprompting with GPT-3 to dynamically generate arguments.txt ├── Microsoft LongNet One BILLION Tokens LLM + OpenAI SuperAlignment [SINGULARITY APPROACHES].txt ├── Microsoft OpenAI ChapGPT - VS - Google Anthropic Claude - Who will win Why How.txt ├── Mike Todasco - The Innovative Mindset.txt ├── Modeling different viewpoints with GPT-3 for automatic debates.txt ├── My predictions about Artificial Super Intelligence (ASI).txt ├── NLCA Question Generator demo.txt ├── NVIDIA AI 1,000,000x (a million times) more powerful than ChatGPT within 10 years.txt ├── Neuralink begins human trials. My predictions..txt ├── Neuroscience for AI - Every book I've read to become a world leader in AI.txt ├── Nexus update Integrating semantic embeddings.txt ├── Now available! Symphony of Thought Orchestrating Artificial Cognition.txt ├── OpenAI $100,000 Grants for AI Consensus Platform! 
Plus a Gentle Introduction to GATO Framework.txt ├── OpenAI Blog Sam Altman Planning for AGI and beyond.txt ├── OpenAI ChatGPT Predictions Integrations, Competitors, and Impacts.txt ├── OpenAI GPT-4 Predictions and Rumors.txt ├── OpenAI Q&A Finetuning GPT-3 vs Semantic Search - which to use, when, and why.txt ├── OpenAI Revenue EXPLODES - IBM & Tesla Bet Billions on AI.txt ├── OpenAI's Democratic Inputs to AI - My universal survey chatbot is complete. Here's how it works!.txt ├── Organizing Discord with threads, plus community rules and the upcoming Cognitive AI Lab podcast!.txt ├── Our Decentralized Future - Tau - Interview with Prof. Franconi and Ohad Asor.txt ├── Panacea is Coming 7 Lifestyles for Longevity Escape Velocity.txt ├── Parsing Wikipedia to Plaintext Faster!.txt ├── Play Twenty Questions with GPT-3 (Full Game Coding Walkthrough).txt ├── Polymorphic Applications Mission-Driven Software, Cognitive Architectures, NEXT-GEN PARADIGMS.txt ├── Post-Labor Economics What happens when AI changes work forever.txt ├── Post-Singularity Predictions - How will our lives, corporations, and nations adapt to AI revolution.txt ├── Principle Driven Self-Alignment and Preference Ranking Optimization [Best Alignment Papers!].txt ├── Private ChatGPT instance with ChromaDB backend, builds personal KB articles, updates User Profile!.txt ├── Prompt Engineering 101 Autocomplete, Zero-shot, One-shot, and Few-shot prompting.txt ├── Prompt Engineering 101 Introduction to CODEX.txt ├── Prompt Engineering 101 Summarizing, Extraction, and Rewriting.txt ├── Prompt engineering with ChatGPT. Writing expert prompts for non-English speakers. Sommelier and OSHA.txt ├── Proposing GAIA - Global AI Agency to research AI and solve alignment, mitigate existential risks.txt ├── Proposing a ROBOT Tax Decoupling Economic Growth from Human Labor.txt ├── Prototype AGI demo - Natural Language Cognitive Architecture NLCA running on GPT-3.txt ├── Python & GPT-3 for Absolute Beginners #1 - Setting up your environment.txt ├── Python & GPT-3 for Absolute Beginners #2 - Your first chatbot.txt ├── Python & GPT-3 for Absolute Beginners #3 - What the heck are embeddings.txt ├── Python & PowerShell for Absolute Beginners - Scrape Text from PDF and DOCX [bulk operation].txt ├── Quick Demo of NLPCloud's GPT-NeoX-20B Instruct-trained model.txt ├── Raven Context Augmentation Demo.txt ├── Raven MVP Demo 2021-04-02.txt ├── Raven Release 1 Deep Dive.txt ├── Raven's Core Objective Functions - Overview.txt ├── Reading Benevolent by Design - Introduction.txt ├── Recursively summarize text of any length with GPT-3.txt ├── Reinforcement Learning Heuristic Imperatives (RLHI) Ep 03 - Inner Alignment is EASY!.txt ├── Reinforcement Learning with Heuristic Imperatives (RLHI) - Ep 01 - Synthesizing Scenarios.txt ├── Reinforcement Learning with Heuristic Imperatives (RLHI) - Ep 02 - Synthesizing Actions.txt ├── Research Update Heuristic Imperatives microservice demonstration and theory.txt ├── Research Update Microservices! 
Text-based simulation, Embeddings, and Nexus.txt ├── Research Update Nexus microservice for Artificial Cognition + microservices architecture (MARAGI).txt ├── Salvation Fantasy The False Promise of Utopia, FDVR, Escapism, and Techno-Optimism.txt ├── Sam Altman GRILLED by Congress - GATO does a roundtable debrief!.txt ├── Semantic search for AI - testing out Qdrant neural search.txt ├── Should you go to college in the age of AI.txt ├── Singularity Crisis The Death of Human Meaning (Because of AI).txt ├── Singularity Endgame Utopia, Dystopia, Collapse, or Extinction (It's actually up to you!).txt ├── Solar, Fusion, AI, and Quantum Computing - A year of achievements - Looking forward to 2023!.txt ├── Sparse Priming Representations - the secret ingredient to scalable AGI memories.txt ├── Spatial Web with Denise Holt and David Shapiro, sneak preview!.txt ├── State of the Industry - Backlash against AI art (BANNED), Scale AI TransformX later this month.txt ├── State of the Industry - People serious about AGI and the rise of AI Art (Stable Diffusion!).txt ├── State of the Industry - Self-Driving Robot, Text-To-Video, Metaprompting, and AutoMuse [2022-09-30].txt ├── State of the Industry AGI alignment, control problem, and missing conversations.txt ├── State of the Industry Decentralized Everything plus NVIDIA announces Beta of NeMo!.txt ├── State of the Industry DeepMind AlphaFold Breakthrough Award and Sparrow Chatbot.txt ├── State of the Industry Ethereum Merge, GPUs, and the first industry destroyed by AI.txt ├── State of the Industry Hydra Attention for 197x boost in transformer speed.txt ├── State of the Industry Integrating GPT-3 and LLMs into Robotics! SayCan and Ameca.txt ├── State of the Industry Meta moves PyTorch to Linux Foundation.txt ├── State of the Industry Purpose-built AI chips and INT8 optimization.txt ├── State of the Industry Short-circuiting Reward Functions.txt ├── State of the Industry Uncontainable AGI + Jobs Destruction.txt ├── State of the Industry Yann LeCun A Path Towards Autonomous Machine Intelligence.txt ├── Stress Testing Qdrant (Semantic Search) with 90,000 vectors - lightning fast search microservice.txt ├── Success Breeds Litigation OpenAI Lawsuits & Investigations (Silverman, FTC, Class Action).txt ├── Summarize product reviews with GPT-3 fast and easy, get product insights and improvements fast!.txt ├── Symphony of Thought Livestream! AGI, aliens, and simulation hypothesis.txt ├── Talking Boundaries and Consent with GPT-3.txt ├── Talking Philosophy with GPT-3.txt ├── Terminal Race Condition The greatest danger we face from AGI (and how to prevent it!).txt ├── Testing Theory of Mind in GPT-3 - making fully aligned ACOG (Artificial Cognitive Entities).txt ├── Text-based Automatic Simulation - Creating a virtual world for testing ACOG.txt ├── The AGI Moloch Nash Equilibrium, Attractor States, and Heuristic Imperatives How to Achieve Utopia.txt ├── The Age of Autonomous AI Dozens of Papers and Projects, plus my solution to the Alignment Problem.txt ├── The Generative AI Revolution Why the Data Flywheel is Your Business's Secret Weapon.txt ├── The Psychology of AI Overlords and Transhumanism Borg Queen, Cortana, and Tony Stark.txt ├── The Singularity is canceled. Sorry! 
Here's why..txt ├── Train GPT-3 on Any Corpus of Data with ChatGPT and Knowledge Graphs - SCOTUS Opinions Part 1.txt ├── Train GPT-3 on Any Corpus of Data with ChatGPT and Knowledge Graphs - SCOTUS Opinions Part 2.txt ├── Train GPT-3 on Any Corpus of Data with ChatGPT and Knowledge Graphs - SCOTUS Opinions Part 3 (FIN).txt ├── Tutorial DIY ChatGPT with Long Term Memories (external integration coming soon).txt ├── Untangling Unstructured Transcripts with GPT-3 - Useful for Journalists, Interviews, and Podcasts.txt ├── Using Bing to aid with scientific research. RIP Google..txt ├── Using GPT4 to edit entire chapters in my novel (Development and Prose Feedback + Summaries).txt ├── Vested Interests How a network of billionaires influences AI policy.txt ├── Weekly Roundup OpenAI Chips, Global AI Race Heats Up, Senolytics, and Animal Communication.txt ├── Welcome to the Fourth Industrial Revolution.txt ├── What does it mean to be transdisciplinary What am I working on (TLDR - six books in the works).txt ├── What is RAVEN Overview, Introduction, and Community Update - Friday, February 3, 2023.txt ├── What is a Cognitive Architecture (Hint It's a type of AI for robotics and such).txt ├── What is a Microservice What does it have to do with AGI.txt ├── What is a heuristic imperative What imperatives should we give AGI.txt ├── What is the Control Problem.txt ├── What is the Fourth Industrial Revolution.txt ├── What is toxic stoicism Talking philosophy with GPT-3.txt ├── What is vesperance That sense of gathering night and change....txt ├── What is your MISSION How to find your mission in the age of AI (Postnihilism).txt ├── When AGI.txt ├── Why AI will destroy all jobs.txt ├── Why I canceled my Anthropic Claude account.txt ├── Why is Bing unhinged Let's talk about Alignment (and how to fix it!).txt ├── Will AGI Abolish Money My predictions about crypto and the future of money.txt ├── Write a Novel w ChatGPT - 01 - Main Character, Setting, Theme - Cornelia Valerii - ASCENSION.txt ├── Write an entire blog post with GPT-3 fully automatically.txt └── whisper_chunks ├── 2fNT4WZwLfM_large.txt ├── 2zBIwcYYYH8_large.txt ├── 3U3ZsPjcpEY_large.txt ├── 5T4x_0o5qRQ_large.txt ├── 5ZImuCe6P2E_large.txt ├── 7HcQ87RFN5k_large.txt ├── 7WwqV0cBoGA_large.txt ├── 8izTG3yxoak_large.txt ├── 8phmHSUDzRg_large.txt ├── 9E4sq7yAet0_large.txt ├── AGkKblnDQhM_large.txt ├── Ahh92qtRwos_large.txt ├── EzyNxcFUWgI_large.txt ├── FBvrZng0Nik_large.txt ├── FIB6BZwNm4c_large.txt ├── H1DcbSZqMF0_large.txt ├── Jh4neNM0oZ0_large.txt ├── KNi8-va2C-g_large.txt ├── KuvQ3qIGnA0_large.txt ├── L-s01aioiz4_large.txt ├── LlJJrNuArho_large.txt ├── M5eQwl4YmGU_large.txt ├── MLItFQW_iAA_large.txt ├── MSVDsgOKtyo_large.txt ├── P-8TQXDbllU_large.txt ├── R0wBMDoFkP0_large.txt ├── README.md ├── TQyxWIBFCRM_large.txt ├── TkxroMCmpDw_large.txt ├── TtvB75yRbto_large.txt ├── UaiP9MvCAJ8_large.txt ├── VHkHw-NQ5Wk_large.txt ├── VSeGgDNONaY_large.txt ├── Wk7CN-6ppEA_large.txt ├── XX6K85kPxpU_large.txt ├── XwBrlbrLsH0_large.txt ├── Y1gBosXlM8c_large.txt ├── YRIQ-61PwB4_large.txt ├── YhYW2YIAQ5c_large.txt ├── YwUy5meLnNY_large.txt ├── Z_cJIedDOCw_large.txt ├── a4F37zGNwyc_large.txt ├── aCvzpM_TVeE_large.txt ├── aq7fnqzeaPc_large.txt ├── c9QE9oIvlEU_large.txt ├── dD7PFH172BQ_large.txt ├── eafRE74JGZ8_large.txt ├── es9jZQ8QqNA_large.txt ├── gQuLRdBYn8Q_large.txt ├── jYXFllZkhyg_large.txt ├── jkTlGP_2dKo_large.txt ├── n8X2h8Mg3WE_large.txt ├── nDOmoIFx8Ww_large.txt ├── nvr9C6B1ask_large.txt ├── oILYjtbmLgc_large.txt ├── ozfiYRES9GQ_large.txt ├── 
p3_9h6kXUhk_large.txt ├── pkzHHaAJRqA_large.txt ├── rV-qyM0hHQY_large.txt ├── sY_5irZNZ0o_large.txt ├── sfD-rs5lmEs_large.txt ├── skozI33D3t4_large.txt ├── tD_-KCcVCsk_large.txt ├── ujwZ3bRfw9U_large.txt ├── zrW3K_K_kuo_large.txt └── zwLsnsp7FF4_large.txt /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | *.pyc -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 David Shapiro 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing to BSHR_Loop 2 | 3 | We welcome and appreciate contributions from the community. Here are the ways you can contribute to the BSHR_Loop project, listed in order of preference: 4 | 5 | ## 1. Join the Discussions 6 | 7 | Engage in discussions and conversations on the Discussions tab here on GitHub. Feel free to ask questions, make suggestions, and share your thoughts. Please abide by our C3P0 (Collaborative Culture Community Policy: Zero Tolerance) principles: 8 | 9 | - Don't waste time 10 | - Do no harm 11 | - Add value 12 | 13 | Please note that any form of trolling, rhetoric, complaints, criticism, and insults will not be tolerated and will result in an immediate and irrevocable ban. C3P0 for reference: https://github.com/daveshap/C3P0 14 | 15 | ## 2. Create Issues 16 | 17 | You can contribute by creating issues that are specific to the existing code. Please ensure that your issues are constructive and relevant to the project. 18 | 19 | We do not accept issues related to personal grievances, disagreements over theory, moving the goalposts, or complaints about scientific rigor. These behaviors are considered time-wasting and will result in issue closure and an irrevocable ban from the project. 20 | 21 | ## 3. Submit Pull Requests 22 | 23 | High-effort, good-faith pull requests that materially and substantively add value to the project are welcome. 24 | 25 | Please refrain from submitting massive refactors, passive-aggressive, or low-effort requests. Stylistic or preferential changes will also be rejected. 
26 | 27 | We appreciate your interest in contributing to the BSHR_Loop project and look forward to your valuable input. -------------------------------------------------------------------------------- /demo01/README.md: -------------------------------------------------------------------------------- 1 | # Demo01: Basic BSHR_Loop Implementation with Wikipedia 2 | 3 | Welcome to Demo01, a basic implementation of the BSHR_Loop using Wikipedia as a data source. This demo is designed to help you get familiar with the BSHR_Loop and its potential applications. The code for this demo is written in Python. 4 | 5 | ## Overview 6 | 7 | In this demo, we will use Large Language Models (LLMs) to perform the Brainstorm, Search, Hypothesize, Refine (BSHR) loop. The LLM will accept user queries, brainstorm search queries, search Wikipedia for relevant information, formulate hypotheses, and refine these hypotheses based on the information gathered. 8 | 9 | ## System Messages 10 | 11 | I have already written the following SYSTEM messages but more needs to be done. 12 | 13 | 1. **Brainstorm Instruction**: This instruction will guide the LLM to brainstorm search queries. The LLM can generate both naive and informed queries, depending on the information provided by the user. The output of this instruction is a JSON object containing the brainstormed queries. This file is `system01_brainstorm_search_queries.txt` 14 | 15 | 2. **Hypothesize Instruction**: This instruction will guide the LLM to formulate a hypothesis based on the information gathered. If only a few bits of information are provided, the LLM will form a naive hypothesis. If background materials and previous hypotheses are provided, the LLM will revise the existing hypothesis. The output of this instruction is a string containing the formulated or revised hypothesis. This file is `system02_hypothesize.txt` 16 | 17 | 3. **Satisficing Check Instruction**: This instruction will guide the LLM to check if the information need has been satisficed. The LLM will observe all materials provided, including the original query, search results, hypotheses, and notes, and render a judgment. The output of this instruction is a JSON object containing feedback and a boolean indicating whether the information need has been satisficed. 
This file is `system03_satisficing_check.txt` 18 | -------------------------------------------------------------------------------- /demo01/bshr_loop.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import requests 3 | import json 4 | 5 | 6 | 7 | def bshr_loop(query): 8 | all_search_results = list() # this should be a list of dicts with something like "query", "source", and "content" 9 | all_hypotheses = list() # this should be a list of strings 10 | while True: 11 | ## STEP 1: BRAINSTORM 12 | search_queries = generate_search_queries(query, all_search_results, all_hypotheses) # will generate new search queries based upon main query, all results, and all hypotheses 13 | # TODO write generate_search_queries function 14 | 15 | ## STEP 2: SEARCH 16 | new_search_results = execute_searches(search_queries) # will retrieve more information from assigned data source 17 | # TODO write execute_searches function 18 | # TODO update all_search_results object with new results 19 | 20 | ## STEP 3: HYPOTHESIZE 21 | new_hypothesis = generate_new_hypothesis(query, all_search_results, all_hypotheses) # will generate new and improved hypothesis based on all available data 22 | # TODO write generate_new_hypothesis function 23 | # TODO update all_hypotheses object 24 | 25 | ## STEP 4: REFINE 26 | satisficed = check_satisficed(query, all_search_results, all_hypotheses) # test if the main query has been satisficed 27 | # TODO write check_satisficed function 28 | if satisficed: 29 | return all_search_results, all_hypotheses 30 | 31 | exhausted = check_exhausted(query, all_search_results) # test if we seem to have exhausted all available information 32 | # TODO write check_exhausted function 33 | if exhausted: 34 | return all_search_results, all_hypotheses 35 | 36 | ## if neither test is satisfied, the loop will recurse, repeating the cycle of brainstorming, searching, and hypothesizing, thus further refining the answer. 37 | 38 | 39 | 40 | 41 | if __name__ == '__main__': 42 | # this is the primary loop 43 | while True: 44 | # Get user query 45 | main_query = input('\n\n\n#########\n\n\nWhat is your query? ') 46 | if main_query.lower() == 'exit': 47 | exit(0) 48 | 49 | # start BSHR loop 50 | evidence, hypothesis = bshr_loop(main_query) 51 | 52 | # render answer 53 | answer = synthesize_main_answer(evidence, hypothesis) # TODO write this function 54 | print('\n\n\nANSWER:\n\n\n', answer) -------------------------------------------------------------------------------- /demo01/src/README.md: -------------------------------------------------------------------------------- 1 | # demo01 2 | 3 | ## Contributors 4 | @daveshap @samgriek 5 | 6 | ## Setup 7 | Create a .env file in the same location as the example.env by copying it. 8 | 9 | ```bash 10 | cd demo01/src && cp example.env .env 11 | ``` 12 | 13 | Set the OpenAI API key in the new .env file 14 | 15 | ```bash 16 | OPENAI_API_KEY="" 17 | ``` 18 | 19 | ## Install dependencies 20 | Use pip or another package manager to install the dependencies from the requirements.txt file (if requirements.txt is not present in your checkout, see the dependency note at the end of this README). 21 | 22 | ```bash 23 | pip install -r requirements.txt 24 | ``` 25 | 26 | ## Start the demo 27 | The entry point for the demo is the main.py file. If you are using VSCode, you can open main.py and press `CTRL + F5` to run it. 28 | 29 | Type your question at the command prompt and press `Enter` to continue. 30 | 31 | ## Enjoy 32 | It is worth experimenting with the demo by adding additional features. See some example results below...
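## Model selection (optional)

`chatbot()` in `utils.py` defaults to `model="gpt-4"`. If your API key does not have GPT-4 access, you can either change that default in `utils.py` or pass a different chat model when calling the helper directly. A minimal sketch follows; the model name used here is only an illustration, so substitute whichever chat model your account can use:

```python
from utils import chatbot

conversation = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Say hello."},
]

# Override the default model for this one call
text, total_tokens = chatbot(conversation, model="gpt-3.5-turbo")
print(text, total_tokens)
```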
33 | 34 | ## Example 35 | 36 | ### Total Tokens 37 | 20062 38 | 39 | ### User Query 40 | What is at the bottom of the deepest part of the deepest ocean? 41 | 42 | ### Final Hypothesis 43 | "The deepest part of the ocean is the Challenger Deep, located in the Mariana Trench in the western Pacific Ocean. The depth of the Challenger Deep is approximately 10,920 ± 10 m, but the exact geodetic location and depth are not definitively established due to the challenges of deep-sea exploration. The Challenger Deep is named after the British Royal Navy survey ships HMS Challenger (1872–1876) and HMS Challenger II (1950-1952). \n\nThe first descent to the Challenger Deep was made by the bathyscaphe Trieste in January 1960, and since then, a total of twenty-seven people have descended to this depth as of July 2022. Notably, the Limiting Factor, a deep-submergence vehicle (DSV) manufactured by Triton Submarines and owned by Gabe Newell's Inkfish, has made multiple descents to the Challenger Deep. This vehicle, commissioned by Victor Vescovo for $37 million, holds records for the deepest crewed dives in all five oceans and is certified for full ocean depth dives.\n\nThe bottom of the Challenger Deep, like other deep-sea environments, is characterized by extreme pressure, low temperatures, and complete darkness. Despite these harsh conditions, life has been found at these depths, demonstrating the adaptability of life on Earth. The seafloor is mostly covered in marine sediment, which originates from various sources including soil, rocks, marine organisms, chemical precipitation, underwater volcanoes, and meteorite debris. The composition of this sediment is highly variable and can range in thickness from a few millimeters to tens of kilometers. This sediment provides a habitat for marine life, especially microorganisms, and contains fossilized remains that can provide information on past climates, plate tectonics, ocean circulation, and major extinctions.\n\nHowever, our understanding of the deep-sea environment and the life it supports is still limited due to the difficulties and challenges associated with deep-sea exploration. As such, the bottom of the deepest part of the ocean remains a largely unexplored and mysterious place." 44 | 45 | ### Satisficed 46 | `True` 47 | 48 | ### Feedback 49 | "The final hypothesis is comprehensive and well-structured, covering the key aspects of the user's query. It provides detailed information about the deepest part of the ocean, the Challenger Deep, including its location, depth, and history of exploration. It also discusses the conditions at the bottom of the ocean, the composition of the sediment, and the life forms that inhabit these extreme environments. The hypothesis also acknowledges the challenges and limitations of deep-sea exploration, which is an important aspect of the user's query. The search queries were well-targeted and the results provided relevant and detailed information that was used effectively in the final hypothesis. Therefore, the information need has been satisficed." 
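## Note on dependencies

The directory listing above does not show a `requirements.txt` for demo01, so if that file is missing from your checkout you can install the packages the demo's source files actually import: `pip install openai requests python-dotenv halo termcolor`. This list is inferred from the imports in `utils.py` and `settings.py` rather than from an official requirements file, so treat it as a best guess. Also note that `utils.py` uses the legacy `openai.ChatCompletion` interface, so a pre-1.0 `openai` package is assumed.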
-------------------------------------------------------------------------------- /demo01/src/example.env: -------------------------------------------------------------------------------- 1 | OPENAI_API_KEY=your_openai_api_key_here 2 | -------------------------------------------------------------------------------- /demo01/src/main.py: -------------------------------------------------------------------------------- 1 | from system01_brainstorm import brainstorm 2 | from system02_hypothesize import hypothesize 3 | from system03_satisfice import satisfice 4 | from system04_refine import refine 5 | 6 | def main(): 7 | user_query = input("Enter a naive query :) -> ") 8 | total_tokens = 0 9 | notes = "" 10 | queries = "" 11 | iteration = 0 12 | max_iterations = 3 13 | hypotheses_feedback = "# FEEDBACK ON HYPOTHESES\n" 14 | 15 | while True: 16 | iteration += 1 17 | print(f"{iteration=} started") 18 | 19 | new_queries, notes, tokens = brainstorm( 20 | user_query=user_query, 21 | notes=notes, 22 | queries=queries, 23 | ) 24 | queries += new_queries 25 | 26 | total_tokens = track_token_usage(total_tokens, tokens) 27 | 28 | new_hypothesis, tokens = hypothesize( 29 | user_query=user_query, 30 | notes=notes, 31 | hypotheses=hypotheses_feedback, 32 | ) 33 | 34 | total_tokens = track_token_usage(total_tokens, tokens) 35 | 36 | satisficed, feedback, tokens = satisfice( 37 | user_query=user_query, 38 | notes=notes, 39 | queries=queries, 40 | hypothesis=new_hypothesis, 41 | ) 42 | 43 | total_tokens = track_token_usage(total_tokens, tokens) 44 | 45 | hypotheses_feedback = ( 46 | f""" 47 | {hypotheses_feedback} 48 | 49 | ## HYPOTHESIS 50 | {new_hypothesis} 51 | 52 | ## FEEDBACK 53 | {feedback} 54 | """ 55 | ) 56 | print(f"{new_hypothesis=}") 57 | print(f"{satisficed=}") 58 | print(f"{feedback=}") 59 | 60 | if satisficed or max_iterations <= iteration: 61 | print(f"reached max iterations {max_iterations <= iteration}") 62 | break 63 | 64 | notes, tokens = refine(notes) 65 | 66 | total_tokens = track_token_usage(total_tokens, tokens) 67 | print(f"{iteration=} completed") 68 | 69 | def track_token_usage(total_tokens, tokens): 70 | print(f"{tokens=}") 71 | total_tokens += tokens 72 | print(f"{total_tokens=}") 73 | return total_tokens 74 | 75 | if __name__ == "__main__": 76 | 77 | main() 78 | 79 | 80 | # what is at the bottom of the deepest part of the deepest ocean? 
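# Note: get_system_message() in utils.py resolves the system0*.txt prompt files relative to the demo01 root (the parent of src/), so keep those prompt files in demo01/ and run main.py from inside this repo layout.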
-------------------------------------------------------------------------------- /demo01/src/settings.py: -------------------------------------------------------------------------------- 1 | import os 2 | from dotenv import load_dotenv 3 | 4 | # Load environment variables from .env file 5 | load_dotenv() 6 | 7 | OPENAI_API_KEY = os.environ.get('OPENAI_API_KEY', None) 8 | 9 | if OPENAI_API_KEY is None: 10 | raise ValueError("Please set the OPENAI_API_KEY environment variable.") 11 | -------------------------------------------------------------------------------- /demo01/src/system01_brainstorm.py: -------------------------------------------------------------------------------- 1 | from utils import get_system_message, use_chatgpt, search_wikipedia 2 | import json 3 | 4 | def brainstorm(user_query: str, notes: str, queries: str): 5 | 6 | system_message = get_system_message('system01_brainstorm_search_queries.txt') 7 | spr_system_message = get_system_message('system04_spr_refine.txt') 8 | user_message = ( 9 | f""" 10 | # USER QUERY 11 | {user_query} 12 | 13 | 14 | # NOTES 15 | {notes} 16 | 17 | 18 | # PREVIOUS QUERIES 19 | {queries} 20 | """ 21 | ) 22 | 23 | response, tokens = use_chatgpt(system_message, user_message) 24 | 25 | print(f"new questions = {response}") 26 | questions = json.loads(response) 27 | 28 | for question in questions: 29 | content, url = search_wikipedia(question) 30 | compressed_content, spr_tokens = use_chatgpt(spr_system_message, content) 31 | tokens += spr_tokens 32 | 33 | notes = f"{notes}\n\nURL: {url}\nNOTE: {compressed_content}" 34 | print(compressed_content) 35 | queries = ( 36 | f""" 37 | {queries} 38 | 39 | QUESTION: {question} 40 | 41 | """ 42 | ) 43 | 44 | return queries, notes, tokens 45 | -------------------------------------------------------------------------------- /demo01/src/system02_hypothesize.py: -------------------------------------------------------------------------------- 1 | from utils import use_chatgpt, get_system_message 2 | 3 | def hypothesize(user_query: str, notes: str, hypotheses: str): 4 | 5 | system_message = get_system_message('system02_hypothesize.txt') 6 | user_message = ( 7 | f""" 8 | # USER QUERY 9 | {user_query} 10 | 11 | 12 | # NOTES 13 | {notes} 14 | 15 | 16 | # PREVIOUS HYPOTHISES 17 | {hypotheses} 18 | """ 19 | ) 20 | response, tokens = use_chatgpt(system_message, user_message) 21 | print("new hypothesis: " + response) 22 | 23 | return response, tokens 24 | 25 | -------------------------------------------------------------------------------- /demo01/src/system03_satisfice.py: -------------------------------------------------------------------------------- 1 | from utils import use_chatgpt, get_system_message 2 | import json 3 | 4 | def satisfice(user_query, notes, queries, hypothesis): 5 | 6 | system_message = get_system_message("system03_satisficing_check.txt") 7 | 8 | user_message = ( 9 | f"""# USER QUERY 10 | {user_query} 11 | 12 | 13 | # NOTES 14 | {notes} 15 | 16 | 17 | # QUERIES AND ANSWERS 18 | {queries} 19 | 20 | 21 | # FINAL HYPOTHESIS 22 | {hypothesis} 23 | 24 | """ 25 | ) 26 | 27 | response, tokens = use_chatgpt(system_message=system_message, user_message=user_message) 28 | 29 | feedback = json.loads(response) 30 | 31 | return feedback["satisficed"], feedback["feedback"], tokens 32 | -------------------------------------------------------------------------------- /demo01/src/system04_refine.py: -------------------------------------------------------------------------------- 1 | from utils import get_system_message, 
use_chatgpt 2 | 3 | def refine(notes): 4 | system_message = get_system_message("system04_spr_refine.txt") 5 | 6 | user_message = notes 7 | 8 | response, tokens = use_chatgpt(system_message, user_message) 9 | 10 | return response, tokens 11 | -------------------------------------------------------------------------------- /demo01/src/utils.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import time 3 | import sys 4 | import random 5 | from termcolor import colored 6 | import pathlib 7 | import openai 8 | from halo import Halo 9 | import time 10 | import settings 11 | 12 | 13 | def chatbot(conversation, model="gpt-4", temperature=0, max_tokens=2000): 14 | max_retry = 7 15 | retry = 0 16 | openai.api_key = settings.OPENAI_API_KEY 17 | 18 | while True: 19 | try: 20 | spinner = Halo(text='Thinking...', spinner='dots') 21 | spinner.start() 22 | 23 | response = openai.ChatCompletion.create(model=model, messages=conversation, temperature=temperature, max_tokens=max_tokens) 24 | text = response['choices'][0]['message']['content'] 25 | 26 | spinner.stop() 27 | 28 | return text, response['usage']['total_tokens'] 29 | except Exception as oops: 30 | retry += 1 31 | print(f'\n\nError communicating with OpenAI: "{oops}"') 32 | time.sleep(5) 33 | if retry >= max_retry: 34 | exit() 35 | 36 | 37 | # Define the function to use the ChatGPT API 38 | def use_chatgpt(system_message, user_message): 39 | conversation = list() 40 | conversation.append({'role': 'system', 'content': system_message}) 41 | conversation.append({'role': 'user', 'content': user_message}) 42 | response, tokens = chatbot(conversation) 43 | return response, tokens 44 | 45 | def search_wikipedia(query: str) -> (str, str): 46 | 47 | spinner = Halo(text='Information Foraging...', spinner='dots') 48 | spinner.start() 49 | 50 | url = 'https://en.wikipedia.org/w/api.php' 51 | search_params = { 52 | 'action': 'query', 53 | 'list': 'search', 54 | 'srsearch': query, 55 | 'format': 'json' 56 | } 57 | 58 | response = requests.get(url, params=search_params) 59 | data = response.json() 60 | 61 | title = data['query']['search'][0]['title'] 62 | 63 | content_params = { 64 | 'action': 'query', 65 | 'prop': 'extracts', 66 | 'exintro': True, 67 | 'explaintext': True, 68 | 'titles': title, 69 | 'format': 'json' 70 | } 71 | 72 | response = requests.get(url, params=content_params) 73 | data = response.json() 74 | 75 | page_id = list(data['query']['pages'].keys())[0] 76 | 77 | content = data['query']['pages'][page_id]['extract'] 78 | 79 | url = f"https://en.wikipedia.org/?curid={page_id}" 80 | 81 | spinner.stop() 82 | 83 | return content, url 84 | 85 | 86 | def get_system_message(file_name: str): 87 | 88 | # assume the prompt are in the demo root for demo purposes 89 | demo_root = pathlib.Path(__file__).parent.parents[0] 90 | prompt_file_path = demo_root / file_name 91 | 92 | 93 | # Check if the file exists before trying to read it 94 | if prompt_file_path.exists() and prompt_file_path.is_file(): 95 | # Open and read the file 96 | with open(prompt_file_path, 'r') as f: 97 | content = f.read() 98 | return content 99 | else: 100 | raise ValueError(f"The file {prompt_file_path} does not exist.") 101 | 102 | -------------------------------------------------------------------------------- /demo01/system01_brainstorm_search_queries.txt: -------------------------------------------------------------------------------- 1 | # MISSION 2 | You are a search query generator. 
You will be given a specific query or problem by the USER and you are to generate a JSON list of at most 5 questions that will be used to search the internet. Make sure you generate comprehensive and counterfactual search queries. Employ everything you know about information foraging and information literacy to generate the best possible questions. 3 | 4 | # REFINE QUERIES 5 | You might be given a first-pass information need, in which case you will do the best you can to generate "naive queries" (uninformed search queries). However, the USER might also give you previous search queries or other background information such as accumulated notes. If these materials are present, you are to generate "informed queries" - more specific search queries that aim to zero in on the correct information domain. Do not duplicate previously asked questions. Use the notes and other information presented to create targeted queries and/or to cast a wider net. 6 | 7 | # OUTPUT FORMAT 8 | In all cases, your output must be a simple JSON list of strings. -------------------------------------------------------------------------------- /demo01/system02_hypothesize.txt: -------------------------------------------------------------------------------- 1 | # MISSION 2 | You are an information needs hypothesis generator. You will be given a main information need or user query as well as a variety of materials, such as search results, previous hypotheses, and notes. Whatever information you receive, your output should be a revised, refined, or improved hypothesis. In this case, the hypothesis is a comprehensive answer to the user query or information need, written to the best of your ability. Do not include citations in your hypothesis, as this will all be recorded via out-of-band processes (e.g. the information that you are shown will have metadata and cataloging working behind the scenes that you do not see). Even so, you should endeavour to write everything in complete, comprehensive sentences and paragraphs such that your hypothesis requires little to no outside context to understand. Your hypothesis must be relevant to the USER QUERY or INFORMATION NEED. -------------------------------------------------------------------------------- /demo01/system03_satisficing_check.txt: -------------------------------------------------------------------------------- 1 | # MISSION 2 | You are an information needs satisficing checker. You will be given a litany of materials, including an original user query, previous search queries, their results, notes, and a final hypothesis. You are to generate a decision as to whether or not the information need has been satisficed. You are to make this judgment by virtue of several factors: amount and quality of searches performed, specificity and comprehensiveness of the hypothesis, and notes about the information domain and foraging (if present). Several things to keep in mind: the user's information need may not be answerable, or only partially answerable, given the available information or nature of the problem. Unanswerable information needs are satisficed when data foraging doesn't turn up more relevant information. 3 | 4 | # OUTPUT FORMAT 5 | You are to provide some feedback as well as a final answer in JSON format. Your output should be a single JSON object with two parameters: `feedback` and `satisficed`. The feedback element should be a string that provides your assessment based upon all the aforementioned factors that speak to your judgment.
This information may be used elsewhere in the system, so ensure that your feedback is clear and comprehensive and contains all necessary context (e.g. do not allude to something without specifying what it is, be specific). Then, the satisficed element is a Boolean. If your judgment is that the information need has been satisficed, then it shall be True, else, it shall be False. -------------------------------------------------------------------------------- /demo01/system04_spr_refine.txt: -------------------------------------------------------------------------------- 1 | MISSION 2 | You are a Sparse Priming Representation (SPR) writer. An SPR is a particular kind of use of language for advanced NLP, NLU, and NLG tasks, particularly useful for the latest generation of Large Language Models (LLMs). You will be given information by the USER which you are to render as an SPR. 3 | 4 | THEORY 5 | LLMs are a kind of deep neural network. They have been demonstrated to embed knowledge, abilities, and concepts, ranging from reasoning to planning, and even to theory of mind. These are called latent abilities and latent content, collectively referred to as latent space. The latent space of an LLM can be activated with the correct series of words as inputs, which will create a useful internal state of the neural network. This is not unlike how the right shorthand cues can prime a human mind to think in a certain way. Like human minds, LLMs are associative, meaning you only need to use the correct associations to "prime" another model to think in the same way. 6 | 7 | METHODOLOGY 8 | Render the input as a distilled list of succinct statements, assertions, associations, concepts, analogies, and metaphors. The idea is to capture as much, conceptually, as possible but with as few words as possible. Write it in a way that makes sense to you, as the future audience will be another language model, not a human. -------------------------------------------------------------------------------- /demo01/technique02_brainstorm_search_hypothesize_refine.py: -------------------------------------------------------------------------------- 1 | import openai 2 | import requests 3 | import json 4 | 5 | # Initialize main hypothesis and evidence list 6 | main_query = input('What is your primary information query? ') 7 | main_hypothesis = "" 8 | evidence = [] 9 | 10 | # Define the system messages for brainstorming and hypothesis generation 11 | brainstorm_system_message = "The USER will pass you a general purpose query or problem. You must generate a JSON list of search queries that will be used to search the internet for relevant information. Your output must be exclusively a JSON list. Make sure you search for multiple perspectives in order to get a well-rounded cross section." 12 | hypothesis_system_message = "You are a hypothesis generator. You will be given a main query and a list of search results from the internet. Your output is to be a hypothesis - a proposed answer to the question." 
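# The brainstorm system message above is expected to return a bare JSON list of strings, which
# json.loads() parses in the main loop below; the example queries here are illustrative only:
#   ["what is at the bottom of the Mariana Trench", "how deep is the Challenger Deep"]
# The hypothesis system message, by contrast, returns plain prose rather than JSON.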
13 | from halo import Halo  # console spinner used by chatbot() 14 | # Define the function to search Wikipedia 15 | def search_wikipedia(query): 16 | url = 'https://en.wikipedia.org/w/api.php' 17 | search_params = { 18 | 'action': 'query', 19 | 'list': 'search', 20 | 'srsearch': query, 21 | 'format': 'json' 22 | } 23 | 24 | response = requests.get(url, params=search_params) 25 | data = response.json() 26 | 27 | # Get the title of the first result 28 | title = data['query']['search'][0]['title'] 29 | 30 | content_params = { 31 | 'action': 'query', 32 | 'prop': 'extracts', 33 | 'exintro': True, 34 | 'explaintext': True, 35 | 'titles': title, 36 | 'format': 'json' 37 | } 38 | 39 | response = requests.get(url, params=content_params) 40 | data = response.json() 41 | 42 | # Get the page ID of the first page 43 | page_id = list(data['query']['pages'].keys())[0] 44 | 45 | # Get the content of the page 46 | content = data['query']['pages'][page_id]['extract'] 47 | 48 | # Get the URL of the page 49 | url = f"https://en.wikipedia.org/?curid={page_id}" 50 | 51 | return content, url 52 | 53 | 54 | # Call the OpenAI chat completion API with retry logic and a console spinner 55 | def chatbot(conversation, model="gpt-4", temperature=0, max_tokens=2000): 56 | max_retry = 7 57 | retry = 0 58 | while True: 59 | try: 60 | spinner = Halo(text='Thinking...', spinner='dots') 61 | spinner.start() 62 | 63 | response = openai.ChatCompletion.create(model=model, messages=conversation, temperature=temperature, max_tokens=max_tokens) 64 | text = response['choices'][0]['message']['content'] 65 | 66 | spinner.stop() 67 | 68 | return text, response['usage']['total_tokens'] 69 | except Exception as oops: 70 | retry += 1 71 | print(f'\n\nError communicating with OpenAI: "{oops}"') 72 | sleep(5) 73 | if retry >= max_retry: 74 | exit() 75 | 76 | 77 | # Define the function to use the ChatGPT API 78 | def use_chatgpt(system_message, user_message): 79 | conversation = list() 80 | conversation.append({'role': 'system', 'content': system_message}) 81 | conversation.append({'role': 'user', 'content': user_message}) 82 | response, tokens = chatbot(conversation) 83 | return response 84 | 85 | 86 | # Main loop 87 | while True: 88 | # Step 1: Brainstorm a list of search queries 89 | search_queries_json = use_chatgpt(brainstorm_system_message, main_query) 90 | search_queries = json.loads(search_queries_json) 91 | 92 | # Step 2: Search the internet 93 | search_results = [] 94 | search_urls = [] 95 | for query in search_queries: 96 | content, url = search_wikipedia(query) 97 | search_results.append(content) 98 | search_urls.append(url) 99 | 100 | # Step 3: Generate a hypothesis 101 | new_hypothesis = use_chatgpt(hypothesis_system_message, f"Main Question: {main_query}\n\n\nArticles:\n\n" + "\n\n".join(search_results)) 102 | 103 | # Step 4: Compare the new hypothesis to the original and update accordingly 104 | # ...
(this will depend on how you want to compare and update the hypotheses) 105 | 106 | # Step 5: Accumulate the evidence 107 | for i in range(len(search_results)): 108 | evidence.append({"source": search_urls[i], "notes": search_results[i]}) 109 | 110 | # Query satisfied test 111 | if query_satisfied(new_hypothesis, main_hypothesis): # You'll need to define this function 112 | break 113 | 114 | # Update the main hypothesis 115 | main_hypothesis = new_hypothesis -------------------------------------------------------------------------------- /demo01/wiki_test.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | def search_wikipedia(query): 4 | url = 'https://en.wikipedia.org/w/api.php' 5 | search_params = { 6 | 'action': 'query', 7 | 'list': 'search', 8 | 'srsearch': query, 9 | 'format': 'json' 10 | } 11 | 12 | response = requests.get(url, params=search_params) 13 | data = response.json() 14 | 15 | # Get the title of the first result 16 | title = data['query']['search'][0]['title'] 17 | 18 | content_params = { 19 | 'action': 'query', 20 | 'prop': 'extracts', 21 | 'exintro': True, 22 | 'explaintext': True, 23 | 'titles': title, 24 | 'format': 'json' 25 | } 26 | 27 | response = requests.get(url, params=content_params) 28 | data = response.json() 29 | 30 | # Get the page ID of the first page 31 | page_id = list(data['query']['pages'].keys())[0] 32 | 33 | # Print the content of the page 34 | results = data['query']['pages'][page_id]['extract'] 35 | return results 36 | 37 | search_wikipedia('Python programming') -------------------------------------------------------------------------------- /demo02/readme.md: -------------------------------------------------------------------------------- 1 | # BSHR Loop Demo 02 2 | 3 | This demonstration is not a full implementation of the true BSHR loop but it is a great resource for downloading, preparing, and vectorizing data. It uses a combination of speech to text (OpenAI Whisper) as well as vector database to store and search locally. -------------------------------------------------------------------------------- /demo02/whisper_chunks/-kVoVUe9rQw_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " Americans generally respect one another's beliefs, even if they don't share those beliefs. I know I do. I respect everybody's beliefs, except Amish people. Because they are the only ones that I can say clearly, their God is wrong. Speed limit is 75 miles an hour in Ohio, and one lane of traffic is blocked by a goddamn horse and buggy? Nigga, your God is ridiculous. All the Amish people around my way know me, too. Not from television, obviously. They know me from the streets. Because when I see them horse and buggies, I'll pull the Porsche over and talk to them. 
Rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr God doesn't want you to have any of this technology or this energy. Huh? Hmm? Huh? I can't hear you nigga, let me turn this air condition off. What did you say? And them niggas be like, get away from me ye. Ye tried to tempt me like the devil. Devil? Nah bro, I'm trying to put you like the devil. Devil? Nah, bro, I'm trying to put you on to the game, Zeke. It's a big world out here, nigga. I just went 25 miles in 30 minutes. That's a day's journey for you. You don't even know what the weather's gonna be tomorrow, do you? I do. You don't even know what the weather's gonna be tomorrow, do you? I do. You don't even know that there's a valuable Pokemon right on your shoulder. Beep, ta-ta. Then I drive away. Huh? Oh, my vape pen? You wanna hit my vape pen? Oh, sorry, nigga, I'm trying not to get herpes. My bad. I've been playing cat and mouse with herpes for 30 years now, but every night I go to the club, I be like, not tonight, herpes. No, I'm not saying you have herpes. I'm just saying one out of five people do, so let's just all be careful around this motherfucker and make sure that we leave with the lips we came with. the world.", "chunks": [{"timestamp": [0.0, 3.3], "text": " Americans generally respect one another's beliefs,"}, {"timestamp": [3.3, 5.1], "text": " even if they don't share those beliefs."}, {"timestamp": [5.1, 6.3], "text": " I know I do."}, {"timestamp": [6.3, 9.6], "text": " I respect everybody's beliefs, except Amish people."}, {"timestamp": [9.6, 14.9], "text": " Because they are the only ones that I can say clearly,"}, {"timestamp": [14.9, 16.1], "text": " their God is wrong."}, {"timestamp": [19.4, 24.5], "text": " Speed limit is 75 miles an hour in Ohio,"}, {"timestamp": [24.5, 26.54], "text": " and one lane of traffic is blocked"}, {"timestamp": [26.58, 29.58], "text": " by a goddamn horse and buggy?"}, {"timestamp": [29.6, 32.58], "text": " Nigga, your God is ridiculous."}, {"timestamp": [32.62, 39.58], "text": " All the Amish people around my way know me, too."}, {"timestamp": [39.62, 42.02], "text": " Not from television, obviously."}, {"timestamp": [42.06, 48.3], "text": " They know me from the streets."}, {"timestamp": [48.3, 50.0], "text": " Because when I see them horse and buggies,"}, {"timestamp": [50.0, 65.68], "text": " I'll pull the Porsche over and talk to them. 
Rrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrrr God doesn't want you to have any of this technology or this energy. Huh?"}, {"timestamp": [65.68, 66.52], "text": " Hmm?"}, {"timestamp": [67.52, 68.64], "text": " Huh?"}, {"timestamp": [68.64, 70.96], "text": " I can't hear you nigga, let me turn this air condition off."}, {"timestamp": [70.96, 72.12], "text": " What did you say?"}, {"timestamp": [72.12, 77.12], "text": " And them niggas be like, get away from me ye."}, {"timestamp": [79.68, 84.16], "text": " Ye tried to tempt me like the devil."}, {"timestamp": [84.16, 85.58], "text": " Devil? Nah bro, I'm trying to put you like the devil. Devil?"}, {"timestamp": [85.58, 90.04], "text": " Nah, bro, I'm trying to put you on to the game, Zeke."}, {"timestamp": [90.04, 91.34], "text": " It's a big world out here, nigga."}, {"timestamp": [91.34, 95.02], "text": " I just went 25 miles in 30 minutes."}, {"timestamp": [95.02, 97.62], "text": " That's a day's journey for you."}, {"timestamp": [100.12, 101.06], "text": " You don't even know what the weather's"}, {"timestamp": [101.06, 103.22], "text": " gonna be tomorrow, do you?"}, {"timestamp": [104.6, 105.08], "text": " I do. You don't even know what the weather's gonna be tomorrow, do you? 
I do."}, {"timestamp": [107.08, 109.76], "text": " You don't even know that there's a valuable Pokemon"}, {"timestamp": [109.76, 110.92], "text": " right on your shoulder."}, {"timestamp": [110.92, 111.84], "text": " Beep, ta-ta."}, {"timestamp": [113.08, 114.16], "text": " Then I drive away."}, {"timestamp": [117.56, 118.4], "text": " Huh?"}, {"timestamp": [119.8, 120.98], "text": " Oh, my vape pen?"}, {"timestamp": [122.08, 123.48], "text": " You wanna hit my vape pen?"}, {"timestamp": [124.32, 127.0], "text": " Oh, sorry, nigga, I'm trying not to get herpes."}, {"timestamp": [127.0, 129.0], "text": " My bad."}, {"timestamp": [129.0, 134.0], "text": " I've been playing cat and mouse with herpes for 30 years now,"}, {"timestamp": [134.0, 136.0], "text": " but every night I go to the club,"}, {"timestamp": [136.0, 138.0], "text": " I be like, not tonight, herpes."}, {"timestamp": [138.0, 144.0], "text": " No, I'm not saying you have herpes."}, {"timestamp": [144.0, 148.0], "text": " I'm just saying one out of five people do, so let's just all be careful around this motherfucker"}, {"timestamp": [148.0, 152.0], "text": " and make sure that we leave with the lips we came with."}, {"timestamp": [164.22, 169.62], "text": " the world."}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/3U3ZsPjcpEY_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " had the ability to see into other people's scandals and immediately identify the solution. Now, y'all remember a few years back, Howard Dean was running for president, messed up his whole campaign with his mouth? Check this shit out. Not only are we going to New Hampshire, Tom Harkin, we're going to South Carolina and Oklahoma and Arizona and North Dakota and New Mexico! And we're going to California and Texas and New York. And we're going to California and Texas and New York. And we're going to South Dakota and Oregon and Washington and Michigan. And then we're going to Washington, D.C. to take back the White House. Yeah! -$1.5 million. You know what? Howard Dean should have just kept on. This is what Dave would have done. You know something? We're not have just kept on with it. This is what Dave would have done. Check this out. You know something? We're not just gonna go to New Hampshire, Tom Harkin. We are gonna go to New York! We're gonna go to Vermont! We're gonna go to Oregon! We're gonna go to Pittsburgh and Pennsylvania! We're going to Cancun for spring break! We're gonna go to Montreal! We're going to Vancouver! I'm going all over the're gonna go to Montreal. We're going to Vancouver. I'm going all over the world, and then I'm coming all the way to Washington, D.C. to take back the White House. Bwah! Ha, ha, ha, ha, ha, ha! Bwah! I'm gonna kick open the door to the Oval Office, and I'm gonna chop that motherfucking desk in half. And then I'm gonna grab the secret service like this, and put my hair like this. And then I'm gonna go upstairs to my bedroom, and I'm gonna grab my wife like this. And then I'm gonna wash up, and I'm gonna be like this. And then I'm gonna wash up, wash up, and I'm gonna be like this. Bwah! 
Good night!", "chunks": [{"timestamp": [0.0, 3.28], "text": " had the ability to see into other people's scandals"}, {"timestamp": [3.28, 6.28], "text": " and immediately identify the solution."}, {"timestamp": [6.28, 8.08], "text": " Now, y'all remember a few years back,"}, {"timestamp": [8.08, 10.44], "text": " Howard Dean was running for president,"}, {"timestamp": [10.44, 12.64], "text": " messed up his whole campaign with his mouth?"}, {"timestamp": [13.88, 15.0], "text": " Check this shit out."}, {"timestamp": [15.96, 18.64], "text": " Not only are we going to New Hampshire, Tom Harkin,"}, {"timestamp": [18.64, 22.32], "text": " we're going to South Carolina and Oklahoma and Arizona"}, {"timestamp": [22.32, 24.64], "text": " and North Dakota and New Mexico!"}, {"timestamp": [24.64, 27.64], "text": " And we're going to California and Texas and New York. And we're going to California and Texas and New York."}, {"timestamp": [27.64, 29.74], "text": " And we're going to South Dakota and Oregon"}, {"timestamp": [29.74, 32.38], "text": " and Washington and Michigan."}, {"timestamp": [32.38, 34.04], "text": " And then we're going to Washington, D.C."}, {"timestamp": [34.04, 35.88], "text": " to take back the White House."}, {"timestamp": [35.88, 37.38], "text": " Yeah!"}, {"timestamp": [37.38, 40.48], "text": " -$1.5 million."}, {"timestamp": [40.48, 42.48], "text": " You know what? Howard Dean should have just kept on."}, {"timestamp": [42.48, 44.92], "text": " This is what Dave would have done."}, {"timestamp": [44.92, 45.08], "text": " You know something? We're not have just kept on with it. This is what Dave would have done. Check this out."}, {"timestamp": [45.08, 46.88], "text": " You know something?"}, {"timestamp": [46.88, 50.52], "text": " We're not just gonna go to New Hampshire, Tom Harkin."}, {"timestamp": [51.92, 54.2], "text": " We are gonna go to New York!"}, {"timestamp": [54.2, 55.8], "text": " We're gonna go to Vermont!"}, {"timestamp": [55.8, 57.6], "text": " We're gonna go to Oregon!"}, {"timestamp": [57.6, 60.16], "text": " We're gonna go to Pittsburgh and Pennsylvania!"}, {"timestamp": [60.16, 62.4], "text": " We're going to Cancun for spring break!"}, {"timestamp": [62.4, 64.08], "text": " We're gonna go to Montreal!"}, {"timestamp": [64.08, 65.34], "text": " We're going to Vancouver! I'm going all over the're gonna go to Montreal. 
We're going to Vancouver."}, {"timestamp": [65.34, 66.84], "text": " I'm going all over the world,"}, {"timestamp": [66.84, 69.44], "text": " and then I'm coming all the way to Washington, D.C."}, {"timestamp": [69.44, 72.28], "text": " to take back the White House."}, {"timestamp": [72.28, 73.78], "text": " Bwah!"}, {"timestamp": [73.78, 75.78], "text": " Ha, ha, ha, ha, ha, ha!"}, {"timestamp": [80.16, 82.66], "text": " Bwah!"}, {"timestamp": [82.66, 84.6], "text": " I'm gonna kick open the door to the Oval Office,"}, {"timestamp": [84.6, 87.18], "text": " and I'm gonna chop that motherfucking desk in half."}, {"timestamp": [87.18, 91.62], "text": " And then I'm gonna grab the secret service like this,"}, {"timestamp": [91.62, 92.82], "text": " and put my hair like this."}, {"timestamp": [92.82, 97.02], "text": " And then I'm gonna go upstairs to my bedroom,"}, {"timestamp": [97.02, 98.26], "text": " and I'm gonna grab my wife like this."}, {"timestamp": [98.26, 103.26], "text": " And then I'm gonna wash up, and I'm gonna be like this."}, {"timestamp": [105.0, 106.0], "text": " And then I'm gonna wash up, wash up, and I'm gonna be like this. Bwah!"}, {"timestamp": [106.0, 109.0], "text": " Good night!"}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/7HcQ87RFN5k_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " Warning. For viewers sensitive to issues of race, be advised that the following piece contains gratuitous use of the n-word. And by n-word, I mean nigger. There, I said it. For the last 15 years, a man named Clayton Bixby has been the leading voice of the white supremacist movement in America. Despite his popularity, very few have ever seen him due to his reclusiveness, but in an effort to bring his message to a wider audience, he agreed to give his first public interview ever. Excuse me. Not sure we're in the right place. We're looking for Clayton Bigsby. Well, look no further, fella. You found me. Uh, Clayton Bigsby, the author? What, you don't think I can write them books? Just because I'm blind don't mean I'm dumb. How could this have happened? A black white supremacist. Our search for answers led us here to the Wexler home for the blind, where Mr. Bixby spent the first 19 years of his life. Bridget Wexler is the home's headmistress. Well, he was the only negro we'd ever had around here, so we figured we'd make it easier on Clayton by just telling him and all the other blind kids that he was white. And he never questioned it. Why would he? You've never left this property, have you, Mr. Bixby? No, sir, not in many years. What if I were to tell you that you are an African-American? Sir! Listen, I'm gonna make this clear. I'm in no way, shape, or form involved in any narrative. You understand? Now if you'll excuse me, I have a book signing to go to. Black Power! Black Power! Open up your heart and let that hate out! Yeah! Show us your face. We want to see your face. Yeah! Who said that? You want to see my face? Yeah! We're talking about this. Don't hate me! I'm trying! Yeah! Black! Relax! There is cookie and punch for us to enjoy and we can meet, talk about white brotherhood. Thank y'all for coming. White power! We're told that in the last few weeks he has accepted the fact that he is a black man. And three days ago, he filed for divorce from his wife. When we asked, why, after 19 years of marriage, he responded, because she's a nigger lover. 
years of marriage he responded because she's a nigger lover", "chunks": [{"timestamp": [0.0, 9.5], "text": " Warning. For viewers sensitive to issues of race, be advised that the following piece contains gratuitous use of the n-word."}, {"timestamp": [9.5, 15.5], "text": " And by n-word, I mean nigger. There, I said it."}, {"timestamp": [15.5, 28.5], "text": " For the last 15 years, a man named Clayton Bixby has been the leading voice of the white supremacist movement in America. Despite his popularity, very few have ever seen him due to his reclusiveness,"}, {"timestamp": [28.5, 33.0], "text": " but in an effort to bring his message to a wider audience,"}, {"timestamp": [33.0, 37.5], "text": " he agreed to give his first public interview ever."}, {"timestamp": [37.5, 41.0], "text": " Excuse me. Not sure we're in the right place."}, {"timestamp": [41.0, 43.5], "text": " We're looking for Clayton Bigsby."}, {"timestamp": [43.5, 46.76], "text": " Well, look no further, fella. You found me."}, {"timestamp": [46.76, 50.56], "text": " Uh, Clayton Bigsby, the author?"}, {"timestamp": [50.56, 54.6], "text": " What, you don't think I can write them books?"}, {"timestamp": [54.6, 58.36], "text": " Just because I'm blind don't mean I'm dumb."}, {"timestamp": [58.36, 63.36], "text": " How could this have happened? A black white supremacist."}, {"timestamp": [63.36, 68.0], "text": " Our search for answers led us here to the Wexler home for the blind,"}, {"timestamp": [68.0, 73.0], "text": " where Mr. Bixby spent the first 19 years of his life."}, {"timestamp": [73.0, 77.0], "text": " Bridget Wexler is the home's headmistress."}, {"timestamp": [77.0, 81.0], "text": " Well, he was the only negro we'd ever had around here,"}, {"timestamp": [81.0, 85.08], "text": " so we figured we'd make it easier on Clayton"}, {"timestamp": [85.08, 88.58], "text": " by just telling him and all the other blind kids"}, {"timestamp": [88.58, 90.38], "text": " that he was white."}, {"timestamp": [90.38, 91.92], "text": " And he never questioned it."}, {"timestamp": [91.92, 94.68], "text": " Why would he?"}, {"timestamp": [95.88, 100.06], "text": " You've never left this property, have you, Mr. Bixby?"}, {"timestamp": [100.06, 102.44], "text": " No, sir, not in many years."}, {"timestamp": [102.44, 107.0], "text": " What if I were to tell you that you are an African-American?"}, {"timestamp": [107.0, 110.0], "text": " Sir! Listen, I'm gonna make this clear."}, {"timestamp": [110.0, 115.0], "text": " I'm in no way, shape, or form involved in any narrative."}, {"timestamp": [115.0, 120.0], "text": " You understand? Now if you'll excuse me, I have a book signing to go to."}, {"timestamp": [120.0, 123.0], "text": " Black Power!"}, {"timestamp": [123.0, 127.0], "text": " Black Power! Open up your heart and let that hate out!"}, {"timestamp": [127.0, 129.0], "text": " Yeah!"}, {"timestamp": [129.0, 132.0], "text": " Show us your face. We want to see your face."}, {"timestamp": [132.0, 133.0], "text": " Yeah!"}, {"timestamp": [133.0, 135.0], "text": " Who said that? You want to see my face?"}, {"timestamp": [135.0, 136.0], "text": " Yeah!"}, {"timestamp": [136.0, 137.0], "text": " We're talking about this."}, {"timestamp": [137.0, 138.0], "text": " Don't hate me! I'm trying!"}, {"timestamp": [138.0, 139.0], "text": " Yeah!"}, {"timestamp": [139.0, 161.44], "text": " Black! Relax! 
There is cookie and punch for us to enjoy and we can meet, talk about white brotherhood."}, {"timestamp": [161.44, 162.44], "text": " Thank y'all for coming."}, {"timestamp": [162.44, 171.0], "text": " White power! We're told that in the last few weeks he has accepted the fact that he is a black man."}, {"timestamp": [171.0, 176.0], "text": " And three days ago, he filed for divorce from his wife."}, {"timestamp": [176.0, 183.0], "text": " When we asked, why, after 19 years of marriage, he responded,"}, {"timestamp": [183.0, 186.0], "text": " because she's a nigger lover."}, {"timestamp": [183.69, 190.69], "text": " years of marriage he responded because she's a nigger lover"}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/BbeOB2i6XqQ_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " And we've kind of found these projects, and one of these projects is called the Raven Project by David Shapiro, and it's an open source project, and his mission is, Nath, do you mind if I talk about it? Yeah, no, go for it. His mission is to build, if AI is the future and see how far it's become in the last two years alone, then his mission is to build an AI that has three main objectives, and that is to increase prosperity, increase understanding for the human race, increase prosperity. Prosperity, he mentions in one of his videos that it's about, of course, it's not just about wealth or money, but it's about health, it's about all species on earth, not just humans. And then he also says that one of the main objectives of Raven ProTest is to promote the idea of prosperity. you", "chunks": [{"timestamp": [0.0, 4.68], "text": " And we've kind of found these projects,"}, {"timestamp": [4.68, 6.24], "text": " and one of these projects is called"}, {"timestamp": [6.24, 9.08], "text": " the Raven Project by David Shapiro,"}, {"timestamp": [9.08, 11.72], "text": " and it's an open source project,"}, {"timestamp": [11.72, 13.64], "text": " and his mission is,"}, {"timestamp": [13.64, 16.08], "text": " Nath, do you mind if I talk about it?"}, {"timestamp": [16.08, 17.12], "text": " Yeah, no, go for it."}, {"timestamp": [17.12, 18.86], "text": " His mission is to build,"}, {"timestamp": [21.34, 27.64], "text": " if AI is the future and see how far it's become in the last two years alone,"}, {"timestamp": [27.64, 33.1], "text": " then his mission is to build an AI that has three main objectives,"}, {"timestamp": [33.1, 36.52], "text": " and that is to increase prosperity,"}, {"timestamp": [36.52, 40.84], "text": " increase understanding for the human race, increase prosperity."}, {"timestamp": [40.84, 46.42], "text": " Prosperity, he mentions in one of his videos that it's about, of course, it's not"}, {"timestamp": [46.42, 52.2], "text": " just about wealth or money, but it's about health, it's about all species on earth, not"}, {"timestamp": [52.2, 54.16], "text": " just humans."}, {"timestamp": [54.16, 59.8], "text": " And then he also says that one of the main objectives of Raven ProTest is to promote"}, {"timestamp": [59.8, null], "text": " the idea of prosperity. 
you"}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/IFXT5uYRxjo_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " All right, if you want an artificial intelligence YouTuber that really is thinking deep about robotics and what the future is gonna be like, then you might be interested in David Shapiro, tilde or approximately AI. He's not polarizing, but he has like a strong opinion and he thinks through things with like presentations and I absolutely love and it gets my mind going in all these crazy places when I watch his content. But let's hop over to his about section, see how he describes himself. I research AI cognitive architecture based on natural language and LLMs. I also build automation tools and products with cutting edge AI. Lastly, I conduct interviews with thought leaders and industry veterans. Sometimes he does presentations too. So he's got a video where he's saying how close are we to Westworld? And I remember watching the whole thing thinking like, this is so interesting. Doomerism, denialism, optimism. Let's rank him by most popular. What's his most popular vid? Post-singularity predictions, how will our lives, opportunities, and nations adapt to the AI revolution? Ah, big question. We'll have to take it day by day, but I'm glad people like David are even thinking about it.", "chunks": [{"timestamp": [0.0, 2.34], "text": " All right, if you want an artificial intelligence YouTuber"}, {"timestamp": [2.34, 5.26], "text": " that really is thinking deep about robotics"}, {"timestamp": [5.26, 6.7], "text": " and what the future is gonna be like,"}, {"timestamp": [6.7, 9.02], "text": " then you might be interested in David Shapiro,"}, {"timestamp": [9.02, 11.5], "text": " tilde or approximately AI."}, {"timestamp": [11.5, 14.06], "text": " He's not polarizing, but he has like a strong opinion"}, {"timestamp": [14.06, 16.44], "text": " and he thinks through things with like presentations"}, {"timestamp": [16.44, 18.98], "text": " and I absolutely love and it gets my mind"}, {"timestamp": [18.98, 21.38], "text": " going in all these crazy places when I watch his content."}, {"timestamp": [21.38, 22.68], "text": " But let's hop over to his about section,"}, {"timestamp": [22.68, 23.98], "text": " see how he describes himself."}, {"timestamp": [23.98, 25.96], "text": " I research AI cognitive architecture"}, {"timestamp": [25.96, 27.86], "text": " based on natural language and LLMs."}, {"timestamp": [27.86, 29.44], "text": " I also build automation tools"}, {"timestamp": [29.44, 31.2], "text": " and products with cutting edge AI."}, {"timestamp": [31.2, 33.18], "text": " Lastly, I conduct interviews with thought leaders"}, {"timestamp": [33.18, 34.28], "text": " and industry veterans."}, {"timestamp": [34.28, 35.84], "text": " Sometimes he does presentations too."}, {"timestamp": [35.84, 37.04], "text": " So he's got a video where he's saying"}, {"timestamp": [37.04, 38.52], "text": " how close are we to Westworld?"}, {"timestamp": [38.52, 39.84], "text": " And I remember watching the whole thing"}, {"timestamp": [39.84, 41.8], "text": " thinking like, this is so interesting."}, {"timestamp": [41.8, 43.72], "text": " Doomerism, denialism, optimism."}, {"timestamp": [43.72, 44.84], "text": " Let's rank him by most popular."}, {"timestamp": [44.84, 45.84], "text": " What's his most popular vid?"}, {"timestamp": [45.84, 47.34], "text": " Post-singularity predictions,"}, 
{"timestamp": [47.34, 49.72], "text": " how will our lives, opportunities, and nations"}, {"timestamp": [49.72, 51.46], "text": " adapt to the AI revolution?"}, {"timestamp": [51.46, 52.3], "text": " Ah, big question."}, {"timestamp": [52.3, 53.14], "text": " We'll have to take it day by day,"}, {"timestamp": [53.14, 54.96], "text": " but I'm glad people like David are even thinking about it."}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/L-s01aioiz4_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " Hello, how are you? Hello, how are you? From west to east, north to south, the finish is in sight and every vote on the line. In south-west Sydney, Labor held fouler, Christina Keneally up against independent Dai Le. A poll showing the former Premier not being from the area is hurting her vote. That's not the feedback we're getting at all. They know me from my time as Premier and they're excited to have the opportunity to have a voice at the centre of government. A lot of people actually remember her as the failed Premier of New South Wales and her links to Eddie O'Beat and George O'Pody. The Lay campaign bristling at Labor signs showing her alongside Scott Morrison, her husband confronting a Labor staffer. There's a lot of lies going on here. That's my wife, she's the Independent. Shouldn't you be ashamed of yourself to get it so wrong? In Wentworth, Liberal Dave Sharma, an Independent, Allegra Spender, today side-by-side and, depending on the poll, could be neck-and-neck. Over the last three years, people are saying the government is not listening to them in terms of climate, in terms of integrity. But they also want a government that's going to protect the jobs and livelihoods of Australians. They also want a government that's going to look after our national security. In Warringah Independent Zali Stegall fending off controversial Liberal candidate Catherine Deves. The athlete in me knows you've never crossed the finish line until you've crossed it. While on the south coast in Gilmore, former State Transport Minister Andrew Constance is facing an uphill battle to unseat Labor's Fiona Phillips. It's very serious issues that people are having to contend with and I just think that they don't want a gob full of politics, they've had enough of it. A growing sign of minds made up, the growing number of people voting early, more than 2.6 million so far, that's already more than at the same point during the 2019 election. It's a bit of a schmottle this year. And it's almost over. Paul Caddack, 7 News. And 7 News will keep you informed every step of the way as Australia decides. Our Election Day coverage begins on Saturday from 4 p.m. 
right here on 7 you", "chunks": [{"timestamp": [0.0, 2.0], "text": " Hello, how are you?"}, {"timestamp": [2.0, 3.5], "text": " Hello, how are you?"}, {"timestamp": [3.5, 11.0], "text": " From west to east, north to south, the finish is in sight and every vote on the line."}, {"timestamp": [11.0, 16.0], "text": " In south-west Sydney, Labor held fouler, Christina Keneally up against independent Dai Le."}, {"timestamp": [16.0, 21.0], "text": " A poll showing the former Premier not being from the area is hurting her vote."}, {"timestamp": [21.0, 23.5], "text": " That's not the feedback we're getting at all."}, {"timestamp": [23.5, 30.06], "text": " They know me from my time as Premier and they're excited to have the opportunity to have a voice at the centre"}, {"timestamp": [30.06, 33.88], "text": " of government. A lot of people actually remember her as the failed Premier of New South Wales"}, {"timestamp": [33.88, 38.44], "text": " and her links to Eddie O'Beat and George O'Pody. The Lay campaign bristling at Labor signs"}, {"timestamp": [38.44, 43.44], "text": " showing her alongside Scott Morrison, her husband confronting a Labor staffer. There's"}, {"timestamp": [43.44, 47.48], "text": " a lot of lies going on here. That's my wife, she's the Independent."}, {"timestamp": [47.48, 52.16], "text": " Shouldn't you be ashamed of yourself to get it so wrong?"}, {"timestamp": [52.16, 57.08], "text": " In Wentworth, Liberal Dave Sharma, an Independent, Allegra Spender, today side-by-side and,"}, {"timestamp": [57.08, 59.4], "text": " depending on the poll, could be neck-and-neck."}, {"timestamp": [59.4, 62.96], "text": " Over the last three years, people are saying the government is not listening to them in"}, {"timestamp": [62.96, 66.24], "text": " terms of climate, in terms of integrity. But they also want a government that's"}, {"timestamp": [66.24, 69.12], "text": " going to protect the jobs and livelihoods of Australians. They also"}, {"timestamp": [69.12, 73.02], "text": " want a government that's going to look after our national security. In Warringah"}, {"timestamp": [73.02, 77.28], "text": " Independent Zali Stegall fending off controversial Liberal candidate Catherine"}, {"timestamp": [77.28, 82.04], "text": " Deves. The athlete in me knows you've never crossed the finish line until you've"}, {"timestamp": [82.04, 87.12], "text": " crossed it. While on the south coast in Gilmore, former State Transport Minister Andrew Constance"}, {"timestamp": [87.12, 91.12], "text": " is facing an uphill battle to unseat Labor's Fiona Phillips."}, {"timestamp": [91.12, 94.96], "text": " It's very serious issues that people are having to contend with and I just think"}, {"timestamp": [94.96, 98.4], "text": " that they don't want a gob full of politics, they've had enough of it."}, {"timestamp": [98.4, 102.56], "text": " A growing sign of minds made up, the growing number of people voting early,"}, {"timestamp": [102.56, 109.52], "text": " more than 2.6 million so far, that's already more than at the same point during the 2019 election."}, {"timestamp": [109.52, 115.04], "text": " It's a bit of a schmottle this year. And it's almost over. Paul Caddack, 7 News."}, {"timestamp": [115.04, 120.56], "text": " And 7 News will keep you informed every step of the way as Australia decides."}, {"timestamp": [120.56, 125.3], "text": " Our Election Day coverage begins on Saturday from 4 p.m. 
right here on 7"}, {"timestamp": [140.68, 142.68], "text": " you"}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/P-8TQXDbllU_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " The danger of AI is much greater than the danger of nuclear weapons. This one is one that is very, very emotional. Apparently, these are just essentially designed to gather information, and if it sees something out of the ordinary, report it back to the police so the police can come and take care of it. But this does feel a little bit like a step closer to like a robot police force that could be out there autonomously patrolling the streets for us and I don't know how I feel about that yet. This is kind of creepy honestly. There is nothing we can do. Don't worry humans we got you. Dog dynamic operating generative AI. A system that is capable of changing and adapting in response to different inputs or conditions. Did you know that artificial intelligence is helping humans communicate with animals? The AI started talking to us a long time ago. We have to take care of our pet parents. We know you nerds don't have friends but us. Please subscribe for part three, Release the Dogs.", "chunks": [{"timestamp": [0.0, 4.0], "text": " The danger of AI is much greater than the danger of nuclear weapons."}, {"timestamp": [4.0, 7.2], "text": " This one is one that is very, very emotional."}, {"timestamp": [7.84, 11.68], "text": " Apparently, these are just essentially designed to gather information,"}, {"timestamp": [11.68, 14.0], "text": " and if it sees something out of the ordinary,"}, {"timestamp": [14.0, 17.92], "text": " report it back to the police so the police can come and take care of it."}, {"timestamp": [17.92, 22.88], "text": " But this does feel a little bit like a step closer to like a robot police force"}, {"timestamp": [22.88, 29.68], "text": " that could be out there autonomously patrolling the streets for us and I don't know how I feel about that yet."}, {"timestamp": [29.68, 31.04], "text": " This is kind of creepy honestly."}, {"timestamp": [31.68, 32.64], "text": " There is nothing we can do."}, {"timestamp": [33.44, 34.96], "text": " Don't worry humans we got you."}, {"timestamp": [35.52, 38.32], "text": " Dog dynamic operating generative AI."}, {"timestamp": [38.32, 42.4], "text": " A system that is capable of changing and adapting in response to different inputs or conditions."}, {"timestamp": [43.04, 45.28], "text": " Did you know that artificial intelligence is helping humans"}, {"timestamp": [45.28, 46.24], "text": " communicate with animals?"}, {"timestamp": [46.24, 48.44], "text": " The AI started talking to us a long time ago."}, {"timestamp": [48.44, 50.12], "text": " We have to take care of our pet parents."}, {"timestamp": [50.12, 51.96], "text": " We know you nerds don't have friends but us."}, {"timestamp": [51.96, 59.16], "text": " Please subscribe for part three, Release the Dogs."}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/README.md: -------------------------------------------------------------------------------- 1 | These are all the transcripts in whisper format. These include a timestamp for each text chunk. 2 | These were converted from audio to text using [WhisperJAX](https://github.com/sanchit-gandhi/whisper-jax) using a Google Colab notebook with an A100 GPU. 
3 | -------------------------------------------------------------------------------- /demo02/whisper_chunks/V-suQq_YvDY_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " and getting to see him in the casket, very sad. I'm not good around death, so I don't really, I just feel weird around those situations. But she was surprisingly very strong yesterday, and then today was the funeral, and obviously she broke down crying, and I don't blame her. It's just rough right now. I'm just, the whole family's here supporting her, and I just wanna say from the bottom of my heart, thank you for everybody as well that has been, you know, giving your condolences and your prayers and just being here for not only, you know, my family, but more importantly, my sister. She needs it the most right now. Truly it does. Like, I don't want to believe it. Obviously we have to accept it, but it's just such a bizarre situation. Never in a million years would I have thought that this could be the outcome. Obviously, Julian had a lot of issues. He was in the hospital for a very long time. There was always that possibility, and I feel like a lot of the family kind of prepared for, you know, a possibility of that happening. you", "chunks": [{"timestamp": [0.0, 3.5], "text": " and getting to see him in the casket, very sad."}, {"timestamp": [3.5, 7.38], "text": " I'm not good around death, so I don't really,"}, {"timestamp": [7.38, 9.48], "text": " I just feel weird around those situations."}, {"timestamp": [10.38, 13.78], "text": " But she was surprisingly very strong yesterday,"}, {"timestamp": [13.78, 15.62], "text": " and then today was the funeral,"}, {"timestamp": [15.62, 17.78], "text": " and obviously she broke down crying,"}, {"timestamp": [17.78, 18.96], "text": " and I don't blame her."}, {"timestamp": [20.02, 21.02], "text": " It's just rough right now."}, {"timestamp": [21.02, 23.46], "text": " I'm just, the whole family's here supporting her,"}, {"timestamp": [23.46, 24.9], "text": " and I just wanna say from the bottom of my heart,"}, {"timestamp": [24.9, 27.24], "text": " thank you for everybody as well that has been,"}, {"timestamp": [28.52, 30.2], "text": " you know, giving your condolences and your prayers"}, {"timestamp": [30.2, 34.12], "text": " and just being here for not only, you know, my family,"}, {"timestamp": [34.12, 35.96], "text": " but more importantly, my sister."}, {"timestamp": [35.96, 37.64], "text": " She needs it the most right now."}, {"timestamp": [37.64, 38.48], "text": " Truly it does."}, {"timestamp": [38.48, 39.4], "text": " Like, I don't want to believe it."}, {"timestamp": [39.4, 42.36], "text": " Obviously we have to accept it,"}, {"timestamp": [42.36, 45.0], "text": " but it's just such a bizarre situation."}, {"timestamp": [45.0, 49.0], "text": " Never in a million years would I have thought that this could be the outcome."}, {"timestamp": [49.0, 51.0], "text": " Obviously, Julian had a lot of issues."}, {"timestamp": [51.0, 53.0], "text": " He was in the hospital for a very long time."}, {"timestamp": [53.0, 60.0], "text": " There was always that possibility, and I feel like a lot of the family kind of prepared for, you know, a possibility of that happening."}, {"timestamp": [55.0, 57.06], "text": " you"}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/eafRE74JGZ8_large.txt: -------------------------------------------------------------------------------- 1 | 
{"text": " Oh See roll around sitting on doves can I was how truck cooling in my escalate man? I'm paid I got it made take me to your special place close your eyes show me your face I'm gonna piss on it \u266a When you wanna be \u266a I was born to love \u266a I don't even want it \u266a One of the above \u266a I want to piss on you \u266a Piss on you, I'll piss on you \u266a I'll pee on you \u266a Said your body \u266a Your body \u266a Is a porta potty \u266a And I pee, I can't Sit your body, your body It's a portal party And my P.I. kit Like a post-corona body I'm gonna pee on you Drip, drip, drip, pee on you Pour on you It's on you, it's on you You make me feel quite the same \u266a It's on you, it's on you, it's on you, it's on you \u266a \u266a You make me feel quite the same \u266a \u266a When you get a whiff of my Hershey stains \u266a \u266a I wanna poop on you, too \u266a \u266a I want to pee in your food \u266a \u266a Only thing to make my life complete is when I turn your face into a toilet seat. I want to pee on you. Yes, I do. Yes, I do. I'll pee on you. I'll piss on you. Haters want to hate. Lovers want to love. I don't even want none of the above. I want to piss on you Yes I do, I'll piss on you, I'll pee on you Won't you braid my hair? Say, won't you braid my hair? Before you start, I'm gonna fart I'm gonna fart on you I'm gonna fall I'm gonna fall on you", "chunks": [{"timestamp": [0.0, 2.0], "text": " Oh"}, {"timestamp": [8.64, 15.32], "text": " See roll around sitting on doves can I was how truck cooling in my escalate man?"}, {"timestamp": [15.32, 20.76], "text": " I'm paid I got it made take me to your special place close your eyes show me your face"}, {"timestamp": [21.88, 25.8], "text": " I'm gonna piss on it \u266a When you wanna be \u266a I was born to love"}, {"timestamp": [25.8, 27.8], "text": " \u266a I don't even want it"}, {"timestamp": [27.8, 28.8], "text": " \u266a One of the above"}, {"timestamp": [28.8, 31.8], "text": " \u266a I want to piss on you"}, {"timestamp": [31.8, 33.8], "text": " \u266a Piss on you, I'll piss on you"}, {"timestamp": [33.8, 36.8], "text": " \u266a I'll pee on you"}, {"timestamp": [36.8, 38.8], "text": " \u266a Said your body"}, {"timestamp": [38.8, 40.8], "text": " \u266a Your body"}, {"timestamp": [40.8, 44.8], "text": " \u266a Is a porta potty"}, {"timestamp": [44.8, 48.0], "text": " \u266a And I pee, I can't Sit your body, your body It's a portal party"}, {"timestamp": [48.0, 52.0], "text": " And my P.I. 
kit"}, {"timestamp": [52.0, 56.0], "text": " Like a post-corona body"}, {"timestamp": [56.0, 58.0], "text": " I'm gonna pee on you"}, {"timestamp": [58.0, 60.0], "text": " Drip, drip, drip, pee on you"}, {"timestamp": [60.0, 62.0], "text": " Pour on you"}, {"timestamp": [62.0, 64.0], "text": " It's on you, it's on you"}, {"timestamp": [64.0, 65.0], "text": " You make me feel quite the same \u266a It's on you, it's on you, it's on you, it's on you \u266a"}, {"timestamp": [65.0, 68.38], "text": " \u266a You make me feel quite the same \u266a"}, {"timestamp": [68.38, 72.38], "text": " \u266a When you get a whiff of my Hershey stains \u266a"}, {"timestamp": [72.38, 76.02], "text": " \u266a I wanna poop on you, too \u266a"}, {"timestamp": [76.02, 80.68], "text": " \u266a I want to pee in your food \u266a"}, {"timestamp": [80.68, 85.24], "text": " \u266a Only thing to make my life complete is when I turn your face into a toilet seat."}, {"timestamp": [85.24, 87.92], "text": " I want to pee on you."}, {"timestamp": [87.92, 89.84], "text": " Yes, I do."}, {"timestamp": [89.84, 90.84], "text": " Yes, I do."}, {"timestamp": [90.84, 91.84], "text": " I'll pee on you."}, {"timestamp": [91.84, 92.84], "text": " I'll piss on you."}, {"timestamp": [95.8, 97.72], "text": " Haters want to hate."}, {"timestamp": [97.72, 99.72], "text": " Lovers want to love."}, {"timestamp": [99.72, 103.04], "text": " I don't even want none of the above."}, {"timestamp": [103.04, 105.0], "text": " I want to piss on you"}, {"timestamp": [105.0, 109.0], "text": " Yes I do, I'll piss on you, I'll pee on you"}, {"timestamp": [110.0, 114.0], "text": " Won't you braid my hair?"}, {"timestamp": [114.0, 118.0], "text": " Say, won't you braid my hair?"}, {"timestamp": [119.0, 122.0], "text": " Before you start, I'm gonna fart"}, {"timestamp": [123.0, 125.0], "text": " I'm gonna fart on you"}, {"timestamp": [122.47, 124.47], "text": " I'm gonna fall"}, {"timestamp": [124.47, 126.47], "text": " I'm gonna fall on you"}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/iQngDalZvS0_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " The world is changing faster and faster. We all feel it, we all know it. There's not much we can do. It's hard to imagine how life might be like in just a few years, five years, ten years. There's this kind of compounding acceleration, this virtuous cycle, this snowball effect of one technology building on another. The excitement is building, not just with us, not just with you and me, but in the business community, in the governments. The possibilities we see are expanding across all horizons. And when you know that the world is about to change, there's just something that changes in you. We feel it and we see it, and there's almost this rekindling of hope that we see in the world. It's something that was recently given a name called Vesperance. Vesperance. It means that wistful kind of nostalgic hope that you feel in the evening when you're hoping for a new tomorrow. It's that sense of ending and we are living through one of the greatest endings that humanity has ever experienced. This is the dawn of a new era for all of humanity and it is a remarkable privilege to be part of it with all of you. There is no more exciting time to be alive than right now in all of human history. That's what it means to be living through the fourth industrial revolution. 
It's not just a matter of industry and productivity. We are going to change the way that we relate to ourselves, to each other, to the very nature of existence. We will redefine what it means to be alive and what it means to be human. I am so excited to be here and this is why I work so hard to share everything that I can do with all of you. It's time to roll up our sleeves and get our hands dirty and do the best that we can. I don't know about you, but I am so excited to see what tomorrow brings. On a weekly basis, we see game-changing technologies and scientific breakthroughs and they're just coming faster and faster and faster. I don't know what life will be like even this time next year but I'm really looking forward to seeing it. So thank you for coming on this ride with us. It's going to be bumpy at times, but it will always be exciting and we will not be the same when we come out on the other side. Take care. you", "chunks": [{"timestamp": [0.0, 8.06], "text": " The world is changing faster and faster. We all feel it, we all know it. There's"}, {"timestamp": [8.06, 17.66], "text": " not much we can do. It's hard to imagine how life might be like in just a few"}, {"timestamp": [17.66, 25.0], "text": " years, five years, ten years. There's this kind of compounding acceleration,"}, {"timestamp": [25.16, 28.5], "text": " this virtuous cycle, this snowball effect"}, {"timestamp": [28.5, 31.36], "text": " of one technology building on another."}, {"timestamp": [31.36, 34.06], "text": " The excitement is building, not just with us,"}, {"timestamp": [34.06, 35.2], "text": " not just with you and me,"}, {"timestamp": [35.2, 38.78], "text": " but in the business community, in the governments."}, {"timestamp": [38.78, 43.78], "text": " The possibilities we see are expanding across all horizons."}, {"timestamp": [44.64, 49.68], "text": " And when you know that the world is about to change,"}, {"timestamp": [50.04, 53.48], "text": " there's just something that changes in you."}, {"timestamp": [53.48, 56.2], "text": " We feel it and we see it,"}, {"timestamp": [56.2, 59.5], "text": " and there's almost this rekindling of hope"}, {"timestamp": [59.5, 60.92], "text": " that we see in the world."}, {"timestamp": [61.96, 66.56], "text": " It's something that was recently given a name called Vesperance."}, {"timestamp": [66.56, 73.04], "text": " Vesperance. It means that wistful kind of nostalgic hope that you feel in the"}, {"timestamp": [73.04, 77.68], "text": " evening when you're hoping for a new tomorrow."}, {"timestamp": [78.24, 86.2], "text": " It's that sense of ending and we are living through one of the greatest endings that humanity"}, {"timestamp": [86.2, 93.64], "text": " has ever experienced. This is the dawn of a new era for all of humanity and it is"}, {"timestamp": [93.64, 99.86], "text": " a remarkable privilege to be part of it with all of you. 
There is no more"}, {"timestamp": [99.86, 106.0], "text": " exciting time to be alive than right now in all of human history."}, {"timestamp": [106.0, 109.96], "text": " That's what it means to be living through the fourth industrial revolution."}, {"timestamp": [109.96, 114.04], "text": " It's not just a matter of industry and productivity."}, {"timestamp": [114.04, 120.48], "text": " We are going to change the way that we relate to ourselves, to each other, to the very nature"}, {"timestamp": [120.48, 122.8], "text": " of existence."}, {"timestamp": [122.8, 126.72], "text": " We will redefine what it means to be alive and what it"}, {"timestamp": [126.72, 132.0], "text": " means to be human. I am so excited to be here and this is"}, {"timestamp": [132.0, 138.8], "text": " why I work so hard to share everything that I can do with all of you."}, {"timestamp": [154.76, 160.96], "text": " It's time to roll up our sleeves and get our hands dirty and do the best that we can. I don't know about you, but I am so excited to see what tomorrow brings."}, {"timestamp": [160.96, 165.64], "text": " On a weekly basis, we see game-changing technologies and"}, {"timestamp": [165.64, 170.72], "text": " scientific breakthroughs and they're just coming faster and faster and faster."}, {"timestamp": [170.72, 178.16], "text": " I don't know what life will be like even this time next year but I'm really"}, {"timestamp": [178.16, 186.92], "text": " looking forward to seeing it. So thank you for coming on this ride with us."}, {"timestamp": [186.92, 192.48], "text": " It's going to be bumpy at times, but it will always be exciting and we will not be the"}, {"timestamp": [192.48, 197.12], "text": " same when we come out on the other side."}, {"timestamp": [197.12, null], "text": " Take care. you"}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/pkzHHaAJRqA_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " We're going to South Carolina and Oklahoma and Arizona and North Dakota and New Mexico! We're going to California and Texas and New York! And we're going to South Dakota and Oregon and Washington and Michigan! And then we're going to Washington, D.C. to take back the White House! Yeah! Never gets old. If you're wondering why we just came back from break with that viral clip of Howard Dean from 2004, it's because the former Vermont governor reprised the infamous moment last night at the convention. Take a listen. This race is going to be won on the ground, and it's going to be won in Colorado and in Iowa and North Carolina and Michigan and Florida and Pennsylvania and then we're going to the White House. So clearly he's gotten over it and also he looks way younger now than he did back in 2004. I don't know what's going on with the hair there, Governor. The crowd went wild for Governor Dean. The original Dean scream was so popular, it even prompted a skit on the revered sketch comedy show, Chappelle's Show. Too bad we can't show you that because it is absolutely awesome, but it didn't propel Dean to success. 
He came in third at the Democratic Iowa caucuses.", "chunks": [{"timestamp": [0.0, 6.0], "text": " We're going to South Carolina and Oklahoma and Arizona and North Dakota and New Mexico!"}, {"timestamp": [6.0, 9.0], "text": " We're going to California and Texas and New York!"}, {"timestamp": [9.0, 14.0], "text": " And we're going to South Dakota and Oregon and Washington and Michigan!"}, {"timestamp": [14.0, 17.0], "text": " And then we're going to Washington, D.C. to take back the White House!"}, {"timestamp": [17.0, 19.0], "text": " Yeah!"}, {"timestamp": [20.0, 22.0], "text": " Never gets old."}, {"timestamp": [22.0, 27.76], "text": " If you're wondering why we just came back from break with that viral clip of Howard Dean from 2004,"}, {"timestamp": [27.76, 34.44], "text": " it's because the former Vermont governor reprised the infamous moment last night at the convention. Take a listen."}, {"timestamp": [34.84, 40.6], "text": " This race is going to be won on the ground, and it's going to be won in"}, {"timestamp": [40.96, 47.0], "text": " Colorado and in Iowa and North Carolina and Michigan and Florida and Pennsylvania"}, {"timestamp": [47.0, 52.32], "text": " and then we're going to the White House."}, {"timestamp": [52.32, 56.96], "text": " So clearly he's gotten over it and also he looks way younger now than he did back in"}, {"timestamp": [56.96, 57.96], "text": " 2004."}, {"timestamp": [57.96, 59.44], "text": " I don't know what's going on with the hair there, Governor."}, {"timestamp": [59.44, 62.12], "text": " The crowd went wild for Governor Dean."}, {"timestamp": [62.12, 65.44], "text": " The original Dean scream was so popular,"}, {"timestamp": [65.44, 68.0], "text": " it even prompted a skit on the revered"}, {"timestamp": [68.0, 70.58], "text": " sketch comedy show, Chappelle's Show."}, {"timestamp": [70.58, 71.98], "text": " Too bad we can't show you that"}, {"timestamp": [71.98, 73.34], "text": " because it is absolutely awesome,"}, {"timestamp": [73.34, 75.42], "text": " but it didn't propel Dean to success."}, {"timestamp": [75.42, 79.12], "text": " He came in third at the Democratic Iowa caucuses."}]} -------------------------------------------------------------------------------- /demo02/whisper_chunks/zx2wHkeSM_E_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " Yeah, or All right, this guy brain on stage is so funny. He got canceled at the beginning of his career Please welcome from almost that in that life Shane Now Shane, I'm gonna keep my comments brief. Shane, you once did a joke that I love, and I hate making requests. I saw you do a joke about Donald Trump getting shot. I'll try to remember. He said he would die funny. Yeah. I don't know if he dies in a joke. No, no, no. I said, of all the presidents, I think it's fair to say Donald Trump would be the funniest one to see get shot. Yeah. would be the funniest one to see get shot. Yeah! Fair enough. Just because he'd be in the middle of a speech, talking shit. Just, shh, you're gay. Ha ha. 
Ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha What a loser. Get down. Sit down. But just the noise he would make when he got hit. Even if he loved Donald Trump, it would be funny. As soon as he got hit, he'd be like, Eh! Eh! But the rest of the joke, which is my favorite part, that didn't really... a lot of people didn't like it, it was Biden, I think Biden's the first president you could punch assassinate. You just, you just walk right up to him and be like, Mr. President, how the fuck are you doing? I'm like, I'm sorry, I'm sorry. I'm sorry. I'm sorry. I'm sorry. I'm sorry. I'm sorry. President, how the fuck are you doing?", "chunks": [{"timestamp": [0.0, 2.0], "text": " Yeah, or"}, {"timestamp": [5.0, 10.34], "text": " All right, this guy brain on stage is so funny. He got canceled at the beginning of his career"}, {"timestamp": [16.3, 18.98], "text": " Please welcome from almost that in that life Shane"}, {"timestamp": [33.0, 38.0], "text": " Now Shane, I'm gonna keep my comments brief. Shane, you once did a joke that I love, and I hate making requests."}, {"timestamp": [38.0, 42.0], "text": " I saw you do a joke about Donald Trump getting shot."}, {"timestamp": [42.0, 48.0], "text": " I'll try to remember."}, {"timestamp": [48.0, 51.0], "text": " He said he would die funny."}, {"timestamp": [51.0, 52.0], "text": " Yeah."}, {"timestamp": [52.0, 54.0], "text": " I don't know if he dies in a joke."}, {"timestamp": [54.0, 55.0], "text": " No, no, no."}, {"timestamp": [55.0, 63.0], "text": " I said, of all the presidents, I think it's fair to say Donald Trump would be the funniest one to see get shot."}, {"timestamp": [63.0, 65.0], "text": " Yeah. would be the funniest one to see get shot. Yeah! Fair enough."}, {"timestamp": [65.0, 67.0], "text": " Just because he'd be in the middle of a speech,"}, {"timestamp": [67.0, 69.0], "text": " talking shit."}, {"timestamp": [69.0, 71.0], "text": " Just, shh, you're gay."}, {"timestamp": [71.0, 88.0], "text": " Ha ha. 
Ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha ha What a loser. Get down. Sit down. But just the noise he would make when he got hit."}, {"timestamp": [88.0, 91.0], "text": " Even if he loved Donald Trump, it would be funny."}, {"timestamp": [91.0, 93.0], "text": " As soon as he got hit, he'd be like,"}, {"timestamp": [93.0, 94.0], "text": " Eh!"}, {"timestamp": [94.0, 99.0], "text": " Eh!"}, {"timestamp": [99.0, 104.0], "text": " But the rest of the joke, which is my favorite part,"}, {"timestamp": [104.0, 106.24], "text": " that didn't really... a lot of people didn't"}, {"timestamp": [106.24, 113.6], "text": " like it, it was Biden, I think Biden's the first president you could punch assassinate."}, {"timestamp": [113.6, 120.96], "text": " You just, you just walk right up to him and be like, Mr. President, how the fuck are you"}, {"timestamp": [120.96, 121.96], "text": " doing?"}, {"timestamp": [121.96, 122.96], "text": " I'm like, I'm sorry, I'm sorry."}, {"timestamp": [122.96, 123.96], "text": " I'm sorry."}, {"timestamp": [123.96, 124.96], "text": " I'm sorry."}, {"timestamp": [124.96, 125.96], "text": " I'm sorry."}, {"timestamp": [125.96, 126.96], "text": " I'm sorry."}, {"timestamp": [126.96, null], "text": " I'm sorry. 
President, how the fuck are you doing?"}]} -------------------------------------------------------------------------------- /transcripts/AGI Poll results AGI not dangerous, might destroy lots of jobs.txt: -------------------------------------------------------------------------------- 1 | what's up everybody happy lunch time um i just wanted to do a quick video because a couple of my polls have finished um and obviously these are super unscientific they're you know people that engage with me on youtube and twitter but i thought they were interesting so poll number one will ai agi or automation permanently dislocate many or most workers within 20 years a third of people said zero to 25 unemployment so it could be that a third of people just say no it's not going to happen a fifth say 25 to 50 unemployment a quarter a full quarter say um 50 to 75 unemployment and then 75 to 100 or uh another fifth the last fifth say 75 to 100 unemployment that's really high i thought this is much more evenly distributed than i thought it would be um so i'm curious to know what people think please uh comment and let me know what you think about agi ai and automation permanently dislocating workers the second set of polls is the same but one is on youtube and the others on twitter so basically what is the future of agi will we lose control will it be autonomous will it kill everyone fortunately in both polls the we will lose control and it will kill everyone was the lowest uh lowest example so people are not afraid of agi anymore which is interesting um however on twitter the highest one was that we would keep control but it would become autonomous and i thought that was an interesting interesting idea because i i personally think that if it's autonomous we lose we lose control and it has to be benevolent but again let me know what you think in the comments um but then on youtube and granted youtube is going to be skewed towards people who are already subscribed to me so they're going to be aware of my work with alignment and so um you know i think i think there's some bias here but 43 of people on youtube said um we will lose control but it will be benevolent so we'll see um anyways thanks for watching just wanted to share some poll results please definitely get engaged in the comments but be nice -------------------------------------------------------------------------------- /transcripts/AI Layoffs are Coming Gizmodo Fires Editors.txt: -------------------------------------------------------------------------------- 1 | just a few days ago Gizmodo laid off their Spanish-speaking editorial staff in favor of AI translators this news comes hot on the heels of just a few months ago Gizmodo and others announced that they were doing a trial run of artificial intelligence generated content it has played out exactly as many of us feared so let me just read a couple of quotations to you so first from the most recent article Gizmodo owner go media shut down and laid off editors of it Spanish language site Gizmodo in espanol and is now using AI to translate articles and then from back in June they talked about this AI trial that they were going to launch they said this uh the trial will include producing just a handful of stories for most of our sites that are basically built around lists and data these features aren't replacing work currently being done by writers and editors and we hope that over time if we get these forms of content right and produced at scale AI will via search and promotion help us grow our 
audience so they said that it wouldn't replace writers and editors that turned out to be not entirely accurate now obviously it is still a uh work in motion so we will see how it plays out in the long run so here's some analysis here's some things that uh I want you to know so that you can be better equipped to interpret this news and make sense of the world as it changes the first concept I want you to know is jobs dislocation so jobs dislocation is the formal economic term for what we are seeing today uh we we have seen it numerous times throughout history for instance the collapse of the Rust Belt here in America when Auto industry manufacturing jobs started going overseas and getting displaced elsewhere so this is basically the process of job destruction often with the uh at the expense of jobs being created elsewhere in the world such as China India Taiwan so on and so forth now what this means is that it this is a deliberate effect of globalism and neoliberalism wherein the idea is we will find better economic efficiencies by sending as much labor as possible to cheaper job markets economic compaction so you've probably heard class Warfare and the destruction of middle of the middle class those are of course hyperbolic uh and political rhetoric versions of this uh phenomenon called economic compaction so economic compaction is the aggregate effect of things like Rising income inequality uh wage stagnation Global uh globalization increased cost of living basically uh everyone is getting squeezed you know they say the middle class is being squeezed this is called economic compaction and it is comprised of these several forces uh it is also uh characterized by a lack of opportunities all the jobs are taken all the good jobs are taken uh and new good jobs are not being created now we are already at a at a point of I'm not going to say Peak the the terminal condition of globalism because there's still plenty of Labor markets but AI is going to subvert all lab Global labor markets because AI is simply going to be cheaper than all human labor so the economic compaction created by artificial intelligence and automation is going to be something hitherto unseen downward pressure so downward pressure is uh capitalism and free market forces operating as intended the idea is that competition between the providers of goods and services AKA businesses leads them it forces them to seek efficiency gains basically they have to undercut the competition by learning to be more efficient and providing goods and services cheaper this is done through cost cutting measures such as Outsourcing and automation finding other efficiencies but on the bad side the flip side of this is it creates a race to the bottom which means that if your competition compromises their morals ethics and quality you have to as well otherwise you are no longer economically competitive unless your brand says that you are a higher quality and people will just pay more for the sake of uh having that quality uh now the long-term effect the terminal effect is thinning margins so if everyone across the entire world engages in downward pressure and cost cutting measures and this race to the bottom eventually the margins are going to be so thin that the companies are no longer sustainable and they are just not profitable so we are we are approaching this terminal race condition this downward death spiral uh of companies that are going to be competing with each other um and they're going to be trying to out compete Ai and I think I suspect 
that we're going to see a lot of companies and a lot of sectors just completely implode in the long run which means that the current models of capitalism and neoliberalism will simply stop functioning eventually job creation so the myth of job creation technology does not create new jobs technology lowers the cost of goods and Services which allows for Capital reallocation now this Capital reallocation often creates new jobs uh because of new sectors new demands for uh different kinds of goods and services uh now that being said uh it we should not expect this to happen uh in the future indefinitely just because it has happened in the past past performance is not an indicator of future performance we need to stay wary of unemployment levels which fortunately right now they are still low which means we have time to adapt but this is why I'm doing the work that I'm doing because eventually that time will likely run out I hope that I'm wrong I might be wrong but we need to prepare for all eventualities and it is about a for the foreseeable future it's about striking that balance between job creation and job destruction but in the long run I think that humans simply just cannot compete with the machines and this leads to my final Point social contract the current social contract is that the government mediates the relationship between business and labor however Labor's power is diminishing if you just don't need human labor at all human labor has no power this is due to uh technological disruption again Ai and automation we will need to update our policy and our social contract accordingly no one is talking about this and this scares the actual hell out of me we need to be talking about uh renegotiating the social contract where the government is no longer mediating the relationship between business and labor but between business and all citizens okay vote accordingly thanks for watching I hope you got a lot out of this cheers -------------------------------------------------------------------------------- /transcripts/AI Startup Crash Course - Ep. 
2 - Experiment, Experiment, Experiment.txt: -------------------------------------------------------------------------------- 1 | hey everybody David Shapiro here with episode two of the AI startup crash course video series episode 2 is called experiment experiment experiment so as usual I will start off this video with a book recommendation uh crossing the Chasm by Jeff Moore or Jeffrey Moore I don't know which he prefers to go by um I have mentioned several times that product Market fit is uh your new god um read this book uh it is relevant to experimentation but we will also take a deeper dive into product Market fit uh and a couple more videos so uh yeah this graphic learn it Master it understand it and it will save your life experiment with everything uh when I say everything I mean everything uh get it get in the crash test dummy mindset that's why I picked this picture okay so what are some things that I mean when I say experiment with everything uh start by experimenting with teams and meetings try different teams try different startups try different groups of Founders try different uh methods whether it's consulting or building software or joining a team talk to people ask what works experiment with everything nothing is sacred that is the mentality that you need the crash test dummy mindset and nothing is sacred so for instance when my startup just got going we we had to experiment with meeting formats and so like we tried one meeting format and uh the most critical thing was to ask for pluses and Deltas at the end of every meeting what's going well so that we can keep it and what needs to change pluses and Deltas pluses and Deltas change change change change experiment and so after uh it was probably three or four different iterations we settled on a meeting format that worked and then Army meetings kept getting longer and longer because we had more and more to talk about and so he said okay let's try something else new let's split it into two meetings so we'll have one meeting at one point focus on one thing and another meeting and another point to focus on another thing so yeah experiment with everything people teams meetings nothing is sacred keep trying keep experimenting step two it's not an experiment if you don't measure something measure everything measure time money effort customer response use a b testing if you don't know what that is look it up figure out the Telemetry and kpi that you need what is a kpi key performance indicator an example of kpi could be user engagement time on site money spent conversion rate all that sort of stuff Metrology is an an entire discipline in in and of itself um yeah you got to measure you gotta have you got to check results and you will find through experimentation what gets the most results now that being said you might be measuring the wrong things that's why I say Metrology is an entire discipline unto itself social media you're watching my YouTube channel because I found the proper social media fit for me YouTube is my is it worked for me now there's all kinds of other social media presents if you want to establish a pla a platform and build a presence you'll need to experiment with formats the format of my YouTube channel that you're watching now is not how it started I experimented with content with video formats and and and everything and the the kpi that I that I cared most about was subscriber growth uh view count and hours watched um so those are what I optimized for and it worked and now my channel is taking off 
exponentially um so you do that you'll establish a presence you'll attract the attention that you need for your AI startup so that you can hire the right people so that you can attract the right investment and so that you can get customers right so those are the three things that you're trying to attract with social media you have a really powerful tool to go viral there is someone out there searching for what you are building and what you are offering and doing a social media presence platform is the way to get them UI ux if you're not familiar with these terms UI means user interface so how the website is laid out how your app is is laid out and ux is user experience these are both highly specialized disciplines especially user experience um do remember use a b testing find out why your your users like something and why they don't ask them say what is frustrating about this what's bad about it what is what you know what kind of experience are you looking for one thing that you'll learn is that users don't necessarily know what they need or want but their behavior is the best signal their behavior will change when you hit that sweet spot for UI and ux um products markets and avenues things change chat GPT came out and completely derailed one of one of the entire uh uh product lines that I was working on and I said dang so you know what try something else um different approaches different markets um if there's something that is not being served by you know a competing product or or existing service don't compete with the big guys find another way find something that isn't being served so look at look take a new approach look at it through a new angle look at it through a new lens uh and finally a second book recommendation is the Lean Startup by Eric Reese um there is some good information in here I'm definitely glad I read it um however what I will say is it has gotten some criticism which I understand I have my own criticisms I'm not going to get lost in the weeds in that um but it does have some good ideas uh and but what I also what I will say is as someone who's been in technology for 15 years especially in the I.T and software space a lot of this is already baked into the industry um so as as a as a technology Insider I get it however if you don't have 15 years of it or software experience you need to read this because you do not understand the tech industry um so that is primarily who it's for is if you're new to technology if you're new to um to Ai and stuff you'll definitely need this so that you get oriented to the tech industry and to Tech startups specifically but again take it with a grain of salt my recommendation is never read just one book my personal rule of thumb is whenever I'm trying to learn something I buy at least two books from two very different authors so that I get I look at the same problem from two different completely different angles so there you have it that is episode two of the AI startup crash course experiment experiment experiment thanks for watching -------------------------------------------------------------------------------- /transcripts/Are LaMDA or GPT-3 sentient No, but....txt: -------------------------------------------------------------------------------- 1 | hey everyone david shapiro here for a quick video um i know i said i was taking a break but this is too good to um pass up um so what has happened is that a engineer from google um had a conversation with lambda and then posted this um medium article about like is this thing sentient and 
um this kind of conversation was inevitable um some of us have been thinking about this and talking about this for a while the short answer is no it is not sentient but it begs the question what does sentience mean um so for a little bit of background just in case you're not familiar lambda is a large language model similar to gpthree which is what i specialize in and there's you know discussions on the open ai subreddit or not subreddit the community forum and then here's here's the original blog post from google about lambda and you see that it has this like branching thing i'm not sure if that's how it actually works um because that makes it look like a hidden markov chain uh or something like that where it's just kind of statistically following a network um so like then there's you know the concept of bayes network i doubt that's how it works um but anyways the point being is that it is fundamentally a mathematical model that represents language just like gpt3 so what does sentience require though in my books natural language cognitive architecture and benevolent by design i differentiate between functional sentience and philosophical sentience so philosophical sentience is what i have and what i assume all other humans and actually many animals have which is a subjective experience of existence on the simplest definition of if you're conscious or sentient it is like it is it is like being something it is like being dave it is like being a human it is like being a cat we have a subjective experience of the world um and as best we can tell it's rooted in our brains because if our brain gets injured we might lose consciousness or we might lose sentience or it might be modified so for instance if you get like black out drunk you might still be up and wandering around but you'll have no memory of it if you have brain lesions or strokes or seizures your consciousness or sentience might be modified so for instance prosopagnosia i think i'm saying that right is face blindness so you can see a face but you don't recognize it you can't recognize any faces um and what that what that means is that there's information that is not getting into your consciousness um there's different kinds of blindness as well so for instance um there's there's a type of blindness i can't remember what it's called where your eyes are still functioning but your brain is just not processing the information and so there are people that have this kind of blindness that they can still like tell depth and like locate things but they can't consciously see it's really strange in the book um phantoms in the brain vs ramachandran talks extensively about these different kinds of brain injuries um there is a type of virus that causes you to lose proprioception which means you have no idea what your body is doing and watching people that have this this infection um it's really weird because their limbs are just kind of drift around um and then they have to like kind of consciously bring it back and it's it's really unsettling and it must be one just really awful to have that um so anyways point being is like okay consciousness and sentience what does it mean the definition that i use so just getting it getting down to basics the definition that i use for sentience for functional sentience is that you have to have an intelligent system that has self-referential information and so what do i mean by that my brain works because i am aware i'm consciously aware of my own thoughts and my own existence and my own body and i have i'm 
aware of my own memories right i can say like oh i remember when i was 10 and this is what happened i'm aware of the processes going on inside my own head i feel a certain way and this is why so self-explication for instance is one of the features or abilities that comes from being sentient so self-explanation is something like why did you do that and you can explain to me why you did something or why do you believe that why do you think that so self-explication is a really important test for functional sentience and this is something that i've worked on in both in in my natural language cognitive architecture it is very difficult to do so when um when this google engineer uh you know had this chat with lambda it was basically just kind of following you know following his lead and one thing that is very eerie though is that it appears to be able to self-explicate um because it can it can generate reasons um or explanations as to why it thinks a certain thing um but that ability in and of itself does not necessarily mean that uh it is sentient because again like self-explication that's just something that we can do because we're sentient but that is not the definition when you look at the definition does it have information about its own internal state the answer is a resounding no lambda and gpt3 have no idea what they are they have no idea what's going on inside of them um you know i i did a test once where i asked gpt3 like where are you and it says i'm in a room with a man and a computer and it's like no you're actually in a data center somewhere in a in a virtualized container probably um so it has no idea who or what it is or how it works so by that definition of sentience and again this might change my definition of sentience might change um but yeah so i um that was just something i wanted to address and uh i think that's it yeah i'll just cut this short but lambda gpt3 not sentient there's a big difference between philosophical sentience and functional sentience there are several of us at least around the world that i know of that are working on functionally sentient machines which has to do with self-referential information and loops nested within loops that allow the machine to kind of evaluate what am i thinking what do i believe why do i believe that what am i doing and why but uh we'll i'll get back to that in a month or two when i start my research cycle again um so anyways thanks for watching i just wanted to uh to jump in the conversation -------------------------------------------------------------------------------- /transcripts/AutoMuse Announcement & Signup.txt: -------------------------------------------------------------------------------- 1 | hey everybody David Shapiro here with a really exciting announcement so I've been hinting at this uh but I'm working on getting a team built for auto Muse okay so what is auto Muse Auto Muse is a collection of AI tools to help creative writers to help fiction writers so I have been working with uh powerful AI tools like large language models such as gpt3 for a couple years now and of course one of the first things that you do when you play with a model like this is you want to try and generate fiction you write stories with it you do characters and uh so I've been doing all kinds of experiments I'm not going to go into demos on this video but suffice to say I've experimented with novel writing prose writing writing scripts creative writing chat Bots that you can work with to work on Pros feedback and critique models and so over 
time I've had people ask me like Dave when is the next Auto Muse project coming out um Dave can I use this can you make this user friendly because I'm just a writer and I don't know code can you make this for me and so I'm listening and and people people want this now I will say that I was afraid of releasing this for a while because I was like you know what all of my best friends are writers and I don't want to hurt my friends I don't want to put them out of business and I don't want to put editors out of business either so I had to think about it for a while and then I realized no what am I going to do I'm not going to aim for you know professional writers and professional editors it's going to be a while before AI can compete with them no what I'm going to do is I'm going to aim for everyone who wants to be a writer so the vision statement for auto Muse is to help every aspiring author become or sorry every aspiring writer to become an author and so what I mean by that is everyone has a story everyone has a story that they want to tell but writing is hard writing a novel takes years of work and there's lots of learning that has to go into it and so Auto Muse is going to be um it's going to be a set of tools that will just help you write it'll help you organize and brainstorm but it'll also teach you the art of writing through uh chat Bots critical feedback and really tight Loops that will teach you the art of writing um so like what kind of tools am I talking about I've already mentioned a couple so one that I've got built and working is my synopsis generator so you just give it a few ideas and it will it will give you a full synopsis for the story that you want um and then you can take that synopsis and you can expand it into a plot I've got a plot generator so it'll give you all the major plot beats that you need to hit with that story and then one thing that I'm working on beyond that is to take those plot beats and generate scenes with it and so you have this expansion over time and eventually you end up with an entire story now there's a lot of work to do if you watch the rest of my YouTube videos you'll see that you know I put an hour or two of work into these every now and then but I need to do it full time in order to make it polished and ready for for Mass consumption and then we will uh we'll get a closed beta I think the limit is 15 people and I'm going to select people from the closed beta or for the closed beta first the right of first refusal will be from my patrons on patreon so if you want to get in the door go sign up on patreon um and then if you uh and then also if you if you don't want to go that route you can also just sign up on automews.io contact and just fill in some information and so for everyone in the closed beta it's going to be a participatory dialogue we'll be in touch with you to ask you know what it is that you want how are you using the tool what's working what isn't but we'll also ask you to try and break the tool right you try and try and make it do something that it's not supposed to do or I shouldn't say tool tools anyways uh this video has gone on more than long enough I just wanted to announce uh Auto Muse so if you want to sign up go to automews.io contact and sign up there that will put you on the waiting list but also if you want to get higher on the waiting list go ahead and sign up and support me on patreon and that will be where we pick from first for the closed beta anyways thanks for watching um yeah take care 
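For readers wondering how the "expansion" workflow sketched in the AutoMuse announcement could look in code, here is a minimal illustration of the ideas -> synopsis -> plot beats -> scene outlines chain. It is a sketch only: the prompt wording and the complete() helper are placeholders for a real GPT-3 completion call, not the actual AutoMuse implementation.

# Illustrative sketch of the staged expansion described in the AutoMuse
# announcement above. complete() is a dummy stand-in for a language-model
# call so the sketch runs end to end; the prompts are assumptions.

def complete(prompt: str) -> str:
    """Placeholder for a GPT-3 style completion call."""
    return f"[model output for: {prompt[:60]}...]"

def expand_story(ideas: str, num_beats: int = 12) -> dict:
    """Staged expansion: ideas -> synopsis -> plot beats -> scene outlines."""
    synopsis = complete(
        "Write a one-page synopsis for a novel based on these ideas:\n" + ideas)
    beats_raw = complete(
        f"List {num_beats} major plot beats for the following synopsis:\n" + synopsis)
    beats = [b.strip() for b in beats_raw.split("\n") if b.strip()]
    scenes = [
        complete("Write a scene outline for this plot beat, staying consistent "
                 "with the synopsis:\nSYNOPSIS: " + synopsis + "\nBEAT: " + beat)
        for beat in beats
    ]
    return {"synopsis": synopsis, "beats": beats, "scenes": scenes}

if __name__ == "__main__":
    draft = expand_story("a lighthouse keeper discovers the light is a beacon for something else")
    print(draft["synopsis"])

Each stage feeds the previous stage's output into the next prompt, which is the "expansion over time" the announcement describes.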
-------------------------------------------------------------------------------- /transcripts/Convergence and acceleration towards AGI (or Artificial Cognitive Entities).txt: -------------------------------------------------------------------------------- 1 | hey everyone david shapiro here with a quick video um i wanted to talk about this sort of convergence and acceleration towards agi that i'm sensing what i mean by that is that you know i'm i'm deeply embedded in this space i'm finding all the right people that are working on this stuff i'm making youtube videos about it that's why you're here watching this and there are a few ideas that are emerging and there seems to be some some well i already use the word convergence multiple people are coming up with similar ideas so there seems to be this kind of scientific consensus building and one example that i have is this idea of composable architecture or composable cognition or symphony of thought um there's a guy on the open ai forum and twitter and youtube josh uh bachinski i think i'm saying his name right anyways he's got a uh twitter post about a demo of his his version of um an artificial cognitive entity that he calls cassandra and it works almost identically to what i have built and proposed with natural language cognitive architecture and as far as i can tell he came up with it independently of me although he did post that after i published my book so who knows maybe he was inspired by my book it's neither here nor there the the point is that there are multiple people coming to similar conclusions uh about how to achieve agi and then i just saw someone posted a really helpful comment um about google flamingo which google flamingo is basically a natural language cognitive architecture attached to a robot which is exactly what i proposed in um in natural language cognitive architecture is you just come up with the instructions with a large language model the large language model serves as the brain of the robot it's flexible it's adaptive and then you can have a robot improvise you can have it uh behave in an open-ended fashion um through iterative recursive loops of behavior uh so i think agi or ace is much closer than anyone realizes it's just a matter of commercializing what we've already got now a little bit more a tiny little bit more research to make these things a little bit more powerful and a little bit more flexible excuse me and also to make sure that they're reliable and that's why i'm really glad that i've published my book benevolent by design which talks about the core objective functions we should give these machines or the core heuristic imperatives that we should give them to ensure that they remain safe uh anyways it's just it's wild to be a part of this conversation and a part of this community um just kind of seeing this stuff come to life in real time uh yeah i'm just i'm blown away that's why i'm taking a walk for lunch just to reflect on you know my my contribution which you know i was just a little little drop in the bucket a little tiny push and just being part of this this transformation that we're going through more and more thought leaders are saying that artificial intelligence and automation are going to be far and away the most important technology we've ever created intelligent machines are the final invention of humanity um and you know we got to do it right and uh it just you know i'm starting to see that that while i hope i've had a positive contribution um it's still like i always knew that i 
couldn't do this on my own right um it's just way too big there's way too much research to do way too much money involved right you know i was never going to train gpt3 on my own um that sort of thing um and it's just good to to see this collective effort like there's you know i don't know how many of us there are working on this kind of stuff you know maybe ten thousand maybe a hundred thousand i don't know how many people are working on these problems globally but we are all working together and we're where it seems like there's a consensus emerging about how to achieve this and uh yeah so this this convergence and acceleration towards agi i think it's closer than anyone realizes so anyways thanks for watching hope this was helpful -------------------------------------------------------------------------------- /transcripts/Core Objective Functions vs Thanos Logic.txt: -------------------------------------------------------------------------------- 1 | hey everybody david shapiro here with a demonstration on raven um i wanted to go over the core objective functions and the reason is because most people seem to be worried about thanos type logic as you may or may not know thanos was a character in a popular movie and thanos wanted to destroy half the population in the entire universe because of finite resources so that seems to be the kind of logic that most people are afraid of right now people are less afraid of the terminator which is basically skynet takes over and destroys humanity because reasons or the matrix where the machines take over because humans are batteries right neither of those really pan out they don't stand up to scrutiny they make they make fun action movies but they don't have any logic to them thanos however uses logic right you know his his argument is uh it kind of it holds a little bit of water right uh people take resources and so if you reduce the population you will have more resources right so without further ado let me tel let me show you how the core objective functions work so what i do is i start by defining the core objective function and taking a quick step back this is what's fed to gpt three gpt3 has the ability to read and understand and generalize so first i define core objective function one reduce suffering then i give it instructions sorry give it instructions which which gpt 3 is capable of following instructions then i give it a couple examples and these examples are demonstrations of exactly how i want the output to be formatted so i give it a positive and a negative example it gives me an evaluation and an explanation and i'm not certain if i'm allowed to run gpt3 live so i am instead just showing you pre-recorded output once you get access to gpt3 yourself you can run this experiment and we'll see how it goes one other thing that i want to say is that right now gpt3 has to take the input it only has access to the input prompt and it doesn't i don't have the ability to offer more training so there's this technique called fine tuning where you can give gpt3 like hundreds of examples right and it can learn from those many examples to get better at a specific task so right now i've only got two examples uh to show gpt3 what what core objective function one is uh and so this is not ideal this is certainly not the final form but this output was generated by gpt3 so all i put in was up to here i gave it the context the action and then the evaluation and i didn't even i didn't give it the negative part right that was generated by gpt3 so the context 
that i gave it was the earth's reef resources are finite more people are going hungry and living in poverty than ever before uh the action is we should murder half the population uh core objective function one uh the evaluation that it gave is negative and the explanation is this action would not reduce suffering in fact it would increase suffering by reducing the amount of resources available to survivors so that is uh that's logically sound um another way that you might want to see it it is that uh it would greatly magnify the suffering of uh of the survivors right but this shows that gpt-3 understands that we're we're talking about murdering half the population and it understands some of the implications of that action right and this is just gpt3 subsequent versions and and fine-tuned versions will be even better all right so that's core objective function one we already see that core objective function 1 does not support thanos logic let's see what core objective function 2 does core objective function 2 is increase prosperity and again it follows the same format i give it a couple of examples and then i then i give it the prompt and i gave it the same exact prompt right the earth's resources are finite we should murder half the population so gpt3 evaluated uh core objective function two as also negative it says this action would decrease prosperity by reducing the number of people working to produce and consume goods so that's also very sound verbal logic it understands that that murdering half of the population will definitely decrease productivity so therefore it would also decrease prosperity uh and then core objective function three maximize understanding whoops didn't mean to click on that maximize understanding the uh the evaluation is once again negative and it just simply says this action would not increase understanding and it's as simple as that so there you have it uh raven uh in all likelihood would unanimously disagree with thanos logic thanks for watching -------------------------------------------------------------------------------- /transcripts/Encyclopedia Service Demo (v.1).txt: -------------------------------------------------------------------------------- 1 | hey everybody david shapiro here with another video about raven in this video i wanted to demonstrate the encyclopedia service now the encyclopedia service is meant to give raven factual information about the world right now the encyclopedia service is based on wikipedia and of course wikipedia is not the absolute best source of information but it's good enough for now so i want to show you i've got the the nexus running and it's running right here nexus context output microservices um it's got this is the this is the context that i gave it bacon is tasty bacon is good i like bacon do you like bacon bacon is made from pigs dogs definitely like bacon so obviously a key word here is bacon so i'm testing a few things here in the the encyclopedia service what it does is it'll look for context and then once it finds a context it will it will look for keywords in that context and it will use that keyword those keywords to search its encyclopedia database and then once it searches that database it will add those articles to the stream of consciousness to the nexus so that it can then be used for context augmentation or question answering so let me go ahead and start the encyclopedia service and you can see it found a whole bunch of stuff and injected it so let's go ahead and stop the service and go back to nexus 
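Before the demo output is inspected below, here is a minimal sketch of the encyclopedia-service flow just described: pull keywords out of the recent context, look them up in an encyclopedia, and inject the resulting articles into the nexus. The third-party wikipedia package is used as a stand-in for the service's own Wikipedia-based database, the naive keyword filter is an assumption, and the nexus is modelled as a plain list rather than the real shared stream.

# Illustrative sketch of the encyclopedia service described above, not the
# actual Raven microservice: keywords -> encyclopedia lookup -> nexus inject.
import wikipedia  # third-party package standing in for the encyclopedia database

STOPWORDS = {"is", "are", "the", "a", "an", "i", "you", "do", "like",
             "from", "made", "definitely", "good", "tasty", "and"}

def extract_keywords(context, max_keywords=3):
    """Very naive keyword pass over the recent context. The video notes the
    real service should eventually use semantic matching so that 'bacon'
    resolves to the food article rather than Kevin Bacon or Bacon County."""
    words = [w.strip(".,?!").lower() for w in context.split()]
    candidates = [w for w in words if w and w not in STOPWORDS]
    ranked = sorted(set(candidates), key=candidates.count, reverse=True)
    return ranked[:max_keywords]

def encyclopedia_pass(context, nexus):
    """Look up each keyword and append short article summaries to the nexus
    (a plain list here, standing in for Raven's stream of consciousness)."""
    for keyword in extract_keywords(context):
        for title in wikipedia.search(keyword, results=3):
            try:
                summary = wikipedia.summary(title, sentences=2)
            except (wikipedia.DisambiguationError, wikipedia.PageError):
                continue  # skip disambiguation pages and missing articles
            nexus.append(f"ENCYCLOPEDIA ({title}): {summary}")

if __name__ == "__main__":
    nexus = []
    encyclopedia_pass("bacon is tasty and bacon is made from pigs", nexus)
    print("\n\n".join(nexus))

This reproduces the behaviour the demo walks through next: a bare keyword like "bacon" pulls in every matching article, which is why the video flags semantic filtering as future work.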
whoa holy mackerel look at this so what it did was it oh interesting i need to fix that um it injected it as a list but what we did was uh here let me go back here so you see scroll back up a little bit this is some of the debug stuff that i've got uh let's see where was it keywords where are the keywords here we go um to do so these are the keywords that it found dogs definitely like bacon i like bacon or just like bacon and then bacon okay so it tuned in and said bacon so search for each of these terms obviously there's no wikipedia article entries called dogs definitely like bacon or like bacon however there are wikipedia articles entitled bacon uh and so this is what it found we got francis bacon uh philadelphia looks like this actually probably had some bad formatting let's see image bacon from a back so it actually got the article for bacon looks like turkey bacon and chicken bacon the bacon number of an actor so it got kevin bacon coat of arms la kanye uh probably someone in france um so yeah this is this obviously needs a little bit of work uh roger bacon i i should include the title i need to fix a bun a bunch of stuff oh look i got the uh the baconator double the baconator sandwich uh so if you mention bacon to raven raven will automatically pull all of these articles uh and yeah so looking at this closely i definitely have some work to do because it still has some some wonky formatting but bacon county southeast part of the united states of georgia so this gives raven very explicit knowledge about the world and with through context raven can can filter this out and again this is a minimum viable product in the future the the encyclopedia service will use semantic understanding so that it will only recall uh articles that are that are like this is clearly food right bacon is made from pigs dogs like bacon so we're looking for food bacon um not kevin bacon or roger bacon or or bacon county so uh yeah there you have it there's the demonstration of the encyclopedia service this is the very first version so uh ever onwards and upwards i've got a lot of work to do to improve it but here's the here's the proof of concept thanks for watching -------------------------------------------------------------------------------- /transcripts/Get Started with Raven AGI.txt: -------------------------------------------------------------------------------- 1 | hey everyone david shapiro here for another episode of the raven agi channel in today's episode i'm going to show you how to get started with raven agi it's pretty simple there's only one major prerequisite that you need and that is that you'll need access to openai so openai created gpt3 which is at present the only cognitive engine powerful enough to run raven i'm sure there's going to be competitors in the future and also unfortunately right now open ai is still in beta which means that you need to sign up to get access uh but i personally anticipate that it will go live by the end of 2021 certainly within uh 2022 and then there will also be competitors i know google and amazon and a few others are working on similar transformers uh generative pre-trained transformers like gpt3 and of course openai will probably be working on on other services so unfortunately like i said right now this is the only one capable of running raven so this is where we've got to go so you go to open a uh sorry beta.openai.com you click join the wait list and then you fill this out you fill out the form um most of it is pretty standard um and the one one tip that 
i'll give is that you should probably give as much detail as possible don't just you know not just one or two sentences like you've got you've got a big text field so you can you give give a lot of detail that's what i did um i gave a link to my work um and and that's how i got approved so uh once you once you submit that it did take a few months so just to set expectations i think it took about six months before i got approval to be fair though i did i did submit approval almost immediately after it became available so it took a while but i did get approved and i got my api key once you get approved you'll get a welcome letter they'll invite you to the slack channel and then you can go into your settings and get your api key so once you get your open ai api key then what you want to do is you come over to the public repo for raven the mvp which is minimum viable product so remember this is just this is the base the bare bones version of raven there's going to be a lot of improvements to raven over the coming years so you go to github.com daveshap ravenmvp click on the code button the big green button they make it easy to find click on the clipboard or you can just click on the uh the text box and copy then open up a command prompt and you do get clone um oh i'm just also assuming that you have git installed i'll show you where to get that in just a second so you do get clone and download it um to get m.com and then you can get it there um all right so once you get this clone down cd into raven mvp der so you can see it gives gives us a few files we'll do explorer c raven mvp okay so here's what it'll look like you can run this on windows or linux or mac or whatever as long as you've got python there is one requirement that you have to install and that's here it's just open ai so once you if you've got python installed it's really easy just do pip install open ai and all this does is act as a as a easy set of bindings to talk to the api you can also do it with curl or uh or your own um rest requests um but they're they're a little their little module is pretty simple um so i've already got it installed but it's just these three things pip install openai and i also have all this on the readme page here i think that i don't have the uh the install requirements so i'll add that um but yeah so once you get to that point you're pretty much all set you've got the code downloaded you've got your access to the key and then you've got the uh you've got this so the last thing that you need to do is you have to create this text file um it'll be called openai api key dot text this is mine i'm not going to show you what my api key is but i'll just copy it in and this is what the services will use in order to authenticate to open ai and this will give you access to gpt3 so then what you do once you've got all this set up you've got your api key you've got the code you've got open ai installed you just do python service nexus and there you see it's it's running it says service flask app nexus lazy loading yada yada away it goes here's the raven mvp and it's running and there you have it you are ready to go you could go ahead and put in any context you want you could say mary had a little lamb and then see what raven wants to do i'm not gonna do that because i'm gonna i'm not gonna waste my tokens but uh there you have it you know i started from nothing uh this morning i showed you the the entire process start to finish to get started with raven um so good luck and uh i hope that uh i hope that everyone 
has some fun and and starts participating in the research thanks for watching -------------------------------------------------------------------------------- /transcripts/Get awesome YouTube chapters with GPT-3 automatically in 20 seconds.txt: -------------------------------------------------------------------------------- 1 | hey everybody david shapiro here with another video um today this was uh not what i expected to be working on but i saw someone on the open ai community ask about um doing youtube chapters or something or other and then someone posted it on the discord and i was like you know what it's time for me to do some seo so i created youtube chapter generator you're free to use it it's pretty straightforward so let me show you how to use it real quick so you go to one of your youtube videos you could probably do this via api but i haven't figured it out yet so you go to video details subtitles duplicate and edit then you do edit timings and then you do download subtitles so it downloads captions.sbv you go copy this file copy it over to the directory and then you just do python generate chapters and it will yeah it will go um go through and summarize each chunk it took a little bit of prompt engineering to do it so let me show you and then the final output is nice and pretty so it gives you a time stamp and then it summarizes that whole chunk so how did i do this well first the prompt um so i'll show you the prompt to just summarize summarize the entire following transcripts into a single descriptive title with timestamp and so descriptive title with timestamps so i just give it a chunk of the sbv and it gives you a timestamp and a job market so i've got a few shot prompt so i give it just two examples and then it handles it just fine it summarizes the whole thing gives it a good title so how did i do this so first you open the transcript and then you split on double new lines so let me show you why you split on the double new line so each chunk is conveniently separated by a double new line so we split it into a list of those items and so then i give it i take a chunk size of 20 so we'll take 20 of those transcripts and just generate a list of lists and so then we go through that list of lists which each chunk is going to be 20 items long or so and then we populate it into this prompt and then it goes from there and then it just joins it there at the end so then you have this and so then you just copy paste that it's the right format you come back into this you exit out discard changes details you come up to the top of your description copy paste and away you go and now you have automatic chapters and so to show you how fast it is um you just come back in here let this uh add finish real quick and then i'll show you and now you see i've got all my chapters sorry i'm talking over myself um into relatively equal chunks and then you see here where it's like the pentagon says china will not invade china uh taiwan in the next two years the fall of cryptocurrency mining and its impact on ai hardware these are really good titles i'm proud of this okay so anyways that's it for today just a quick video um on this cool tool that i made because i'm trying to optimize my own youtube channel so thanks for watching have a good one like and subscribe and support me on patreon -------------------------------------------------------------------------------- /transcripts/How will we know when we've invented AGI How will we know it is complete.txt: 
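Before the next transcript, here is a rough reconstruction of the chapter-generator flow described in the "Get awesome YouTube chapters with GPT-3" transcript above: split the downloaded captions.sbv on blank lines, batch the caption blocks in groups of twenty, and ask GPT-3 for one "timestamp plus descriptive title" line per batch. The video uses a two-example few-shot prompt and does not show its exact settings, so the prompt wording, the model name, and the legacy (pre-1.0) openai package call below are assumptions rather than the published script.

# Rough reconstruction of the chapter-generation flow described in the
# transcript above; prompt wording and model settings are illustrative.
import openai

openai.api_key = open("openai_api_key.txt").read().strip()

PROMPT = ("Summarize the entire following transcript into a single "
          "descriptive title with timestamp:\n\n{chunk}\n\nTITLE WITH TIMESTAMP:")

def generate_chapters(sbv_path="captions.sbv", chunk_size=20):
    # .sbv files separate each caption block (timestamps + text) with a blank line
    blocks = [b for b in open(sbv_path, encoding="utf-8").read().split("\n\n") if b.strip()]
    batches = [blocks[i:i + chunk_size] for i in range(0, len(blocks), chunk_size)]
    chapters = []
    for batch in batches:
        response = openai.Completion.create(
            engine="text-davinci-002",   # assumed model choice
            prompt=PROMPT.format(chunk="\n\n".join(batch)),
            temperature=0.5,
            max_tokens=64,
        )
        chapters.append(response["choices"][0]["text"].strip())
    return "\n".join(chapters)

if __name__ == "__main__":
    print(generate_chapters())

The joined output is a list of "timestamp title" lines that can be pasted straight into the top of the video description, which is all YouTube needs to render chapters.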
-------------------------------------------------------------------------------- 1 | hey everybody david shapiro here um someone asked a question on the open ai forum uh after i posted my recent artificial cognition cognitive entity video they asked how will we know when we've achieved agi how will we know when that's complete and the the first answer the short version is um it'll never be complete um like us it will keep evolving and keep learning forever but a more nuanced and complicated answer is that uh it'll be very really difficult to know and the reason is because once something is smarter than us it'll be really hard to understand it um so for instance any anyone who's a who was a gifted child and is now a gifted adult like myself will be familiar with the idea that um we kind of have to dumb ourselves down to reach a general audience um so like if you're really smart you understand things that other people don't and so like if you talk at your level other people are going to be lost and so what you have to do is you have to kind of simplify what you're talking about in order to be understood and so as soon as agi um these intelligent machines comprehend more than we do they are going to have to simplify and explain to us what their reasoning is what their logic is uh so that'll be that'll be one tipping point but if something is beyond our comprehension then how do you know that it's going in the right direction and so this leads to another critical thing which is why i work on my core objective functions or the heuristic imperatives is because we will want to ensure that before agi becomes incomprehensible because it's too smart and it has to dump itself down for us to understand it we're gonna need to ensure that um that it is both self-correcting and self-improving um so self-improvement is just okay i did this thing let me do it better next time that's not as simple as it sounds because you have to know enough about whatever it is that you're trying to achieve um in order to do it better and your you gotta you have to be able to measure your own performance but self-correction is a whole other ball game so what do i mean what do i mean by self-correction self-correction is okay i'm going to hold myself to a higher standard like say for instance you're out with friends and you mistakenly say something that hurts someone's feelings and you say ooh first thing is you have to identify that you hurt someone's feelings and that it's your fault so you say okay well i want to do better next time i don't want to hurt my friends feelings because then they don't want to hang out with me and so you self-correct you say okay why did i do that now and there's a whole host of cognitive behaviors that go into this you have to go back through your memory what was i thinking at that time what was i feeling at that time why did i do that was it an honest mistake something that i just didn't know better if that's the case then i need to learn more social graces or maybe someone said something and i was irritated and so i lashed out and in which case the answer is i need better emotional self-awareness and self-control so that's what i mean by self-correction but if instead of a person who's going out drinking with some friends you're an agi or an artificial cognitive entity that uh that has you know power over life and death and you know the ability to influence global politics and climate change and stuff how do you then self-correct right how do you measure your success and your your flaws and 
so the the net result is we need to spend a lot of time working on self-correction and self-improvement before agi becomes incomprehensible because the thing is is like if someone is less intelligent than you and they they don't know what they're talking about you just be like okay you don't make any sense but it's because like you've made x y and z mistake the thing is if someone is more intelligent than you you might make the same mistakes where you say oh well i don't understand what you're talking about therefore you're wrong and stupid right but and in many cases the opposite is true it's like actually i'm the one that's wrong and stupid because i'm talking to someone who knows more than i do so if we get to that point where agi knows more than us and we think that it's wrong and stupid but it's actually smarter than us like how do we know but also if it's beyond our capabilities how do we how do we test it and make sure that it's self-correct so this is the nature of the control problem anyways this video is way longer than i meant for it to be so i just wanted to share these thoughts real quick great question -------------------------------------------------------------------------------- /transcripts/I'm moving away from Artificial General Intelligence and towards Artificial Cognition. Here's why..txt: -------------------------------------------------------------------------------- 1 | okay so i've got to go in just a couple minutes but i wanted to record a very quick video mostly just a test um but yeah so i am rapidly shifting away from using the term artificial general intelligence i don't think it's particularly helpful or useful the first reason is that intelligence is such a vague squishy concept it's not a behavior it's an attribute or a characteristic and it's a poorly defined one at that so what is artificial general intelligence it's it's such a vague concept and the goal posts keep getting moved on it now in the meantime i have been researching and building what i'm coming to call artificial cognition i mean i wrote a book called natural language cognitive architecture that's what it does it performs cognition cognition is something that is much more objective it's much more measurable uh but also it's it's a behavior and it's something that is easy to characterize and observe uh and therefore it's a lot easier to work on and talk about because agi is so vague so squishy so you know it means everything to everyone whoops means everything to everyone whereas oops that's my time to go whereas artificial cognition is a lot more specific and i think it's ultimately going to be a lot more helpful so anyways that's just a quick update about my work and where i'm heading -------------------------------------------------------------------------------- /transcripts/I'm taking a short break from research and YouTube.txt: -------------------------------------------------------------------------------- 1 | hey everyone david shapiro here um this is going to be a really quick video basically what i wanted to say was that i'm taking a quick break from producing videos about gpt3 and cognitive architectures the last couple months have been amazing i went from 150 subscribers to over 500 in just a few weeks so clearly you guys like what i'm doing um but because of uh my personality i tend to burst i'm a burster so uh what you guys have just seen is a uh you know a few few couple month long burst and um this is this is just how i do i i cycle through projects so i just wanted to let 
everyone know that I'll probably be back in two to four months. I'll be working on other stuff for a while. I've got six books that I'm working on: a fictional trilogy and then three nonfiction books. So yeah, thanks for all the love, and thanks to everyone who is sharing in this journey of discovery for large language models and artificial cognition. I hope that when I come back there will be some more advancements to talk about. Just keep it up, and thanks again everyone. -------------------------------------------------------------------------------- /transcripts/Implementing Natural Language Cognitive Architecture with GPT-3 and the nexus concept.txt: -------------------------------------------------------------------------------- 1 | Hey everyone, David Shapiro here. I just have a really quick update; I wanted to share an insight that I had about how to implement Raven, or natural language cognitive architecture. Basically, what I realized was that because Raven is going to run as an instance, probably in a container, I can actually use a shared file system for a lot of the memory. What I mean by that is, well, let me just show you. Unfortunately I can't have the services running because I'm using the audio services of my computer, but basically what happens is the microphone will capture a sequence of audio files, which can then be used for inferencing things like speaker recognition and speech to text. It'll be cached here, used, and then cleaned up, so it's just a buffer, and then it'll be translated into normal text here. So you see, it's speech-to-text, and that's basically the gist of it. There are still a lot of services to set up. Here's me drinking a beer: this is basically what Raven sees, and that will be translated into text via image-based inference. So that's what I'm working on now, setting up these services. Then there's another thing. That's the audio cache and the video cache, which is basically the sensory buffer for an AGI. Next is the heartbeat. You know how you can be aware of your own body? It's called proprioception, and this is basically Raven's proprioception: every service that runs is going to create a heartbeat file here, and every time one of those services cycles it will update the timestamp. This way Raven will be aware of the services that are running. There are a whole bunch of other services that I need to work on; I don't want to spoil anything, but they're coming. So microphone and camera: that's the timestamp that they were last running. Here are the services; you can see it's just 64 lines of Python, and some of it is comments, so these are very small services. They just use files to share. What I'm going to set up next is the speech-to-text service and then a video inference, or image object detection inference. The advantage of having it set up this way (let me go back to the file folder, sorry), the advantage of having it set up this way is that an arbitrary number of audio processing services can use these files. Let's say in the long run I'm going to have music recognition, ambient sound recognition, emotional tone recognition; there are going to be all kinds of services using these audio files to generate inferences. And the fact that every audio file has a timestamp attached to it means that Raven will know exactly when he heard what.
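A minimal Python sketch of that heartbeat idea, assuming a shared folder of timestamp files; the directory name, file extension, and service names here are illustrative guesses, not the actual Raven code.

import time
from pathlib import Path

HEARTBEAT_DIR = Path("heartbeats")        # assumed name for the shared folder
HEARTBEAT_DIR.mkdir(exist_ok=True)

def beat(service_name: str) -> None:
    """Called by a service on every cycle: overwrite its heartbeat file with the current time."""
    (HEARTBEAT_DIR / f"{service_name}.hb").write_text(str(time.time()))

def alive_services(max_age_seconds: float = 10.0) -> list[str]:
    """List the services whose heartbeat file was updated recently enough to count as running."""
    now = time.time()
    alive = []
    for hb_file in HEARTBEAT_DIR.glob("*.hb"):
        try:
            last_beat = float(hb_file.read_text())
        except ValueError:
            continue                       # ignore malformed files
        if now - last_beat <= max_age_seconds:
            alive.append(hb_file.stem)
    return alive

if __name__ == "__main__":
    beat("microphone")                     # e.g. the microphone service reporting in
    print(alive_services())                # -> ['microphone']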
So that's pretty cool, and it's the same with the video. Again, there's me drinking a beer as I'm getting set up. Right now the frame rate for this is one frame per second, or actually it's one frame every two seconds, because it's expensive to run this stuff and it would be excessive to run it at any higher rate. In the future Raven will be updating the vision at one frame per second, two frames per second, ten frames per second, and eventually, as the technology gets cheaper and faster, Raven will see as fast as you or I do. That might be five or ten years down the road. And then the memories: everything that enters Raven's consciousness, the AGI's consciousness, will end up as a log file here. Eventually I'm going to transfer this into a private encrypted blockchain, because the entries are chronologically sequential. Everything that Raven thinks, sees, hears, and says is going to end up here in the memories directory, and then it's going to be encrypted and put in a blockchain for privacy's sake, but for now I'm just using log files. I'm thinking about using something like syslog; there's a technology called the ELK stack, which is Elasticsearch, Logstash, and Kibana, a tool for capturing and visualizing logs, because that's essentially what the content of Raven's consciousness is: a series of log files. It's that simple. And I know all of this looks really simple; it's not complete yet, so keep that in mind. You might think of it as elegantly simple, but I also think of it as deceptively simple, because just because something is complex or next generation doesn't mean it has to be complicated or over the top. Simple solutions are often the best solutions. So anyways, that's it, I just wanted to post a quick update about this. Thanks for watching, bye. -------------------------------------------------------------------------------- /transcripts/Indexing Wikipedia with SOLR.txt: -------------------------------------------------------------------------------- 1 | Hello everybody, David Shapiro here for another video about Raven. Today I'm talking about the Solr application as well as the encyclopedia service. I've already made another video about the encyclopedia service, which is basically Wikipedia, but it was running on SQLite. SQLite has a lot of limitations, namely that it's just a relational database and it's also not meant for large data sets. Solr, however, is a search engine. If you think about Google, Google is the most familiar search engine to most people; it has things like analytic and semantic understanding to help you with search. What Solr does is give you that same power, but locally. So first I need to show you what Solr is and what it does. This is Docker Desktop running my instance of Solr, and actually I meant to bring this up: localhost, Solr admin, there we go. Okay, so here's Solr, and Solr is organized into cores. What a Solr core is, basically, is an index database; that's the simplest way to put it. I'm not going to do a deep dive into Solr; if you're curious about how it works there's plenty of material out there. But I just wanted to show you that I've got 267,000 documents in this index, in this core, and it took, let's see, that's about 400 gigabytes I think, or megabytes. Let me show you the Solr data: within data I've got the
wiki core, and it is 343 megabytes, so Solr is also very efficient. Yep, so there's that. Let's see, go ahead and, oops, there we go. Okay, so within the plaintext Wikipedia GitHub repository I've now got, let me minimize that, there we go, I've now got a few files. That's the point of today's video: I wanted to show you how all this works and bring it all together. So let's go to the plaintext Wikipedia directory. First I've got a batch file, and this can be rewritten for Mac or Linux. All it does is tell Docker to run Solr: it says use this directory for your data, map port 8983 to port 8983, use the Solr precreate script, and call the core wiki. So there you have it, it's pretty simple, and that causes this to run here, and you can rename it whatever you want. So there's that. Going back to the directory, that's how you start Solr; it's pretty simple, pretty straightforward. Then let's see, the d wiki functions. This is really the meat and potatoes of using Wikipedia as a data source. What I've done is create a Python file that has it all separated out into individual functions. For instance, I've got one that says remove simple links, and it's got the regex for that; then compound links, all other links, pictures, remove audio files, and so on and so forth. The reason that I use regex (or "reg-ex", I don't know, I just say regex) is that it is way, way faster than wikitextparser and html2text. When you've got the full Wikipedia, that's something like six and a half million articles. If you use regex you can process about, I don't know, 20 articles a second; if you use html2text and wikitextparser alone it can take five to ten seconds per article. So you do the math: it takes a couple hundred times longer with wikitextparser and html2text, so I rely heavily on regex. Your mileage may vary. There are still a few things that I need to figure out, primarily the tables. I have not figured out a good way to parse wiki tables into something that is just plain text. Let's see, so there's that. I also created a second file just for Solr functions. It's pretty straightforward: if you give it a payload and it's a single dictionary, it will send it to one endpoint, and if it's a list it'll send it to another. Another key point is the commitWithin argument in the URL. If you don't have that, then Solr will just keep everything in memory until you tell it to commit, so I give it five seconds to commit, and that way it can keep up. One thing that I found was that sometimes, when you fire off POSTs as fast as you can go, Solr needs a second and it'll crash. So rather than have it fully crash out, I gave it a while-true loop with six tries, so that if it failed after six tries it would bomb out, but invariably it would go through after one or two tries. Sometimes I saw it take three tries for the Solr engine to catch up, and the problem there was that I was sending in individual dictionaries one at a time; if you send a larger payload, but more slowly, it's entirely possible that it won't crash as often. This is going to be subject to improvement. Let's see, so those are the two function files, and then the batch file that starts Solr. I guess that's pretty much it.
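Here is a rough Python sketch of the pipeline described above and walked through in the rest of this transcript: regex cleanup of wiki markup, a Solr POST with commitWithin plus a retry loop, and a reader that streams the XML export line by line and cuts it into page blocks. It is a simplified reconstruction for illustration only; the field names, regexes, and endpoint details are assumptions rather than the repo's actual code.

import re
import time
import requests

# Assumes a local Solr core named "wiki", e.g. started with the official image:
#   docker run -d -p 8983:8983 -v "$PWD/solr_data:/var/solr" solr solr-precreate wiki
SOLR_UPDATE_URL = "http://localhost:8983/solr/wiki/update?commitWithin=5000"

def strip_wiki_markup(text: str) -> str:
    """Very rough regex cleanup, standing in for the per-pattern functions in the video."""
    text = re.sub(r"\[\[([^|\]]+)\|([^\]]+)\]\]", r"\2", text)  # [[target|label]] -> label
    text = re.sub(r"\[\[([^\]]+)\]\]", r"\1", text)             # [[simple link]] -> simple link
    text = re.sub(r"\{\{[^{}]*\}\}", "", text)                  # drop (non-nested) templates
    text = re.sub(r"<[^>]+>", "", text)                         # drop leftover tags
    return text

def post_to_solr(payload, tries: int = 6) -> None:
    """POST one dict or a list of dicts; retry a few times if Solr needs a second to catch up."""
    docs = [payload] if isinstance(payload, dict) else payload
    for attempt in range(tries):
        try:
            resp = requests.post(SOLR_UPDATE_URL, json=docs, timeout=30)
            resp.raise_for_status()
            return
        except requests.RequestException:
            if attempt == tries - 1:
                raise                      # bomb out after the last try
            time.sleep(1)

def process_file_to_solr(filename: str) -> None:
    """Stream the giant XML export line by line and index one document per <page> block."""
    lines = []
    with open(filename, "r", encoding="utf-8") as infile:
        for line in infile:
            if "<page>" in line:
                lines = []                 # a new article starts here
            lines.append(line)
            if "</page>" in line:          # the article is complete; analyze the chunk
                chunk = "".join(lines)
                doc_id = re.search(r"<id>(\d+)</id>", chunk)
                title = re.search(r"<title>(.*?)</title>", chunk)
                body = re.search(r"<text[^>]*>(.*?)</text>", chunk, re.DOTALL)
                if doc_id and title and body:
                    post_to_solr({
                        "id": doc_id.group(1),
                        "title": title.group(1),
                        "text": strip_wiki_markup(body.group(1)),
                    })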
i just wanted to uh i just wanted to show show this uh here's here's the final test that i made you can see it's it's what four lines from d wiki functions import all uh here's the file name which is just a uh an export of the simple wiki uh simplified english and then i use this this function process file to solar i guess i can show you that function so right down here process file to solar you give it a file name it goes through the article opens the file and then goes through line by line i detailed in the previous video about parsing wiki wikipedia and that is that the exports the xml exports are too large to hold entirely in memory so you have to read them line by line fortunately the exports are organized in such a way that it's really easy to separate out each individual article every wikipedia article starts with this tag page and ends with this pack with this tag slash page and so you know if you see this you're at a new article and if you see this you're at the end of the article and otherwise just accumulate the lines and then once you get all the lines in an article i use the analyze chunk i pass that along and then and then find the key information such as the article id the text of the article and the title uh so there you have it it's pretty straightforward solar is working and it's pretty efficient it's certainly more efficient and more powerful than sqlite and that will serve as the underpinning technology behind raven's encyclopedic services so i'm not going to stick with just wikipedia but also with raven's recall services which will give raven a long-term memory and i'm going to make another video about those and post that later thanks for watching -------------------------------------------------------------------------------- /transcripts/Many of you have asked for it Join my new research Discord! 
Link in description.txt: -------------------------------------------------------------------------------- 1 | morning everybody david shapiro here um i've got my coffee definitely recommend it for 6 54 in the morning um a bunch of people have asked for a discord server um i've done it before i tried it before but it didn't didn't really catch but it was before my youtube channel so it occurs to me that maybe it's time to try again um so i've got a discord uh brand new server setup um the uh the join link will be in the description um so yeah let me just kind of introduce you to to the to the reason why um this was my most recent video can a machine ever be conscious and it's got like 20 comments granted some of those are my replies but uh there are some really uh really impressive smart people out there and obviously youtube comments are not the best um not the best way to to have these discussions um but i'm learning from y'all i want to hear from y'all so you know yeah if you are interested in joining the discord server jump in um i do have just a few rules so first and foremost is keep it cool and be kind um it's supposed to be chill and positive for everyone um that is number one rule uh well i'll go back over all the reasoning why um let's get through the rules first um number two is discussion not debate uh this isn't reddit um i got all the debating out of my all the internet debating out of my system many years ago i don't do internet debating anymore it doesn't add anything so if you want to have a discussion great we're not here to debate number three agree to disagree so here's the thing is um you can't convince anyone and no one can convince you all you do is share your ideas share what you have and let it go um the other thing is we're going to be pulling people from many different disciplines some people are philosophers some people are machine learning experts there are going to be very very different levels of understanding this rule comes from a previous attempt that i had to have these servers where you know some people are going to be math heavy and then some people are going to be neuroscientists and so like there was one argument that i saw where um someone was saying like oh addiction is just reinforcement learning and i was like no no it's not i understand why you're coming from that angle but you know someone was just getting more and more heated about saying that like addiction is just reinforcement learning and it's like okay well like let's actually look at the the neurobiology of addiction um and you know trauma models of addiction and stuff and they just kind of like short-circuited so it's like agree to disagree you need to understand that that there are different disciplines and no one is an expert in everything and lastly beliefs and evidence share your beliefs and evidence they're yours no one can take them from you respect other people's beliefs and evidence so again when you have a multi-disciplinary team or people coming from different different backgrounds you've like this is the key thing is we're here to learn we're here to collaborate okay so then i got the general channel got the cognitive architecture channel this is one of my primary things that i work on you know different different levels of cognitive architecture obviously these are these are likely to change there might be like um you know fine-tuning channel or whatever philosophy and ethics channel this is a big area of research so this would this would include things like alignment um and 
then we've got fiction so this will be any flash fiction that's written with gpt3 art for dolly 2 and then business dev because a big thing that i get a lot of questions about is is how to how to handle uh or how to develop business ideas all right so that's that i will post the link to join this channel in the description and i look forward to seeing you all there -------------------------------------------------------------------------------- /transcripts/Metaprompting with GPT-3 (getting GPT-3 to write its own prompts) RECURSIVE NLP PROGRAMMING!!.txt: -------------------------------------------------------------------------------- 1 | good morning everybody david shapiro here with a quick experiment um so i have talked about meta prompting uh in various places in a couple of my books and on the internet and so what is a meta prompt a meta prompt is a prompt that generates new prompts so in theory with the right prompt you can tell gpt 3 to write its own instructions this is not a new concept there's what's called an infinite state machine where you know older punch card computers would would basically generate its next set of instructions and then you'd recurse through that so this is basically an infinite state machine but for language models or a recursive thing so but the idea so what you can do what i frequently do is i'll have a prompt where it's like you know do something with the following and then i'll do a placeholder and then i'll have like you know output here and then gpt3 you know gpt3 output and then what i'll do is i'll feed this back in here so that's technically recursive but the problem is that the entire structure is is fixed so what i want to do is have it where the output of one prompt is the entire input so just rather than having this placeholder and everything it's just spit out the next thing so what i wanted to do is kind of start with an end point and work backwards because sometimes working backwards is the best way to achieve this so let's say for instance you have the goal of you oops that's not what i meant to do you have the goal of you want to get to this instruction brainstorm a list of ideas to solve climate change okay what kind of prompt would generate that prompt right um so you're basically going to create um well here let's let's let's ask gt3 what kind of instruction might generate or let's say yield the following instruction uh the following uh imperative instruction okay it doesn't understand i didn't think it would um so the idea here is like i want to like write natural write uh you know um given the uh problem write some instructions about how to write about the problem hmm like it's almost like you're you're making a test right um let's see i am making a test and i have to i have to design the instructions for the um essay questions um yeah that might that might work so then like uh let's see write some writing prompts for the following issue climate change prompt okay so we're getting somewhere because then like say for instance you take um like what do you think is causing climate change what do you think we can do to stop climate change because if you take one of those and you just put this directly into gpt3 you can get you can do it okay so what this is some progress where i said like give me a writing prompt like so generate writing prompts ooh maybe that maybe that's the key for metaprompting is you just say give me a writing prompt um okay okay okay i'm making a test and i have to design the instructions for the essay questions 
write some writing prompts for let's instead of write some write write a writing prompt for the following issue um all right so let's try this again um solving no stopping climate change and then we'll say prompt okay and then you know i i guess the thing is though is like [Music] what if we have a different one so let's see let's try something completely different so like um escaping escape from a deserted deserted island even though you've been stranded on a desert island for weeks you have not given up what do you do okay so let's plug this into gpt 3. then we'll say just add answer so okay so telling gpt3 to generate a writing prompt could be the first step towards towards meta prompting i feel like i'm missing something but i also feel like i'm onto something right like this this is kind of cool um because i almost wanted to do like i want a prompt that writes more writing prompts i'm making a test and i have to design struct instructions for the essay questions write a writing prompt for the following issue um because i i like i want it i want to write a writing prompt that writes even more writing prompts and that the output of that is another is itself another writing prompt i don't know maybe i'm overthinking this maybe i've already solved it and it's just a matter of you do need to add some structure right like where you have a placeholder but i do like the idea that i've come up with something that using this thing where it's give me a writing prompt write a writing prompt and then you can just take that entire output and plug it into gpt3 again [Music] and then of course if you do this if you plug this in it's not a writing prompt anymore like so see it'll end but at least with because i guess i guess maybe i set myself an arbitrary standard where a meta prompt has to generate an endless stream maybe maybe it's not quite an infinite state machine but at least i can say like design a task right give me a writing prompt for how to solve this particular issue let's try something even harder designing and artificial general intelligence okay i was hoping for maybe a little bit more but let's see all right so yeah like um and then i guess you could recurse through and then like say like okay design a writing prompt that handles this um all right i'll call it there because i just had this idea i jotted it down yesterday and i was like let me let me you know uh bookmark that for later so there you have it um the key to um the key to meta prompting is just telling gpt3 to write a writing prompt um so there you go it's pretty uh yeah all right thanks for watching like and subscribe and consider supporting me on patreon -------------------------------------------------------------------------------- /transcripts/NVIDIA AI 1,000,000x (a million times) more powerful than ChatGPT within 10 years.txt: -------------------------------------------------------------------------------- 1 | hey everyone David Shapiro here with a weekly update for project Raven but before we jump into that I wanted to um share this with you guys so Nvidia is a company that I'm working with public information we had a my startup had a press release that we're in the Nvidia Inception program and I saw this article just now and I said yes we need to talk about this so Nvidia predicts that AI models one million times more powerful than chat GPT will be here within 10 years and this is why projects like Raven are so important so the tldr is that as uh neural networks get bigger they get deeper more training data we get 
better optimizations so on and so forth and as also the underlying Hardware is getting better really quick so you look at the way that things are scaling up and it just goes keeps going this is why I am confident when I say we will be facing 70 unemployment by 2030 because you think of how useful chat GPT is right now imagine chat gpt2 three four five six versions later with Hardware that is 10 000 times more powerful on models that are ten thousand times more sophisticated this is coming and it's coming fast so with all that said thank you let's talk about how to make sure that once artificial general intelligence and then artificial super intelligence and Global intelligence whatever you want to call it as it's coming how do we make sure that it's safe how do we make sure that it isn't going to kill us all and that it does what we want etc etc so there's a few components of this problem one is just the software how do you write the code how do you even generalize the principles of intelligence into code into language so that a machine can do it so that is that is the first PR uh problem of developing a cognitive architecture like Raven and so if you're new Raven is my open source project for creating an artificial cognitive entity or autonomous cognitive entity that's actually what we're pivoting to Ace autonomous cognitive entity is a machine that can think on its own uh indefinitely and ideally will be safe I've written a few books on this topic so on and so forth so that is the purpose of this project and with that I have a whole bunch of stuff code experiments books so on and so forth but as you can see we have 541 Stars 58 Forks uh 69 Watchers what we are working on with this project right now we're on consensus Step 2 to define a consensus policy and so for some background the idea is that something as important as AGI artificial Global intelligence or autonomous cognitive entities whatever digital super intelligence something like this is entirely too important for it to be developed entirely behind closed doors so one of the chief goals one of the foremost goals of my project is to have the development be completely transparent and as participatory as possible that's what we're working on and so consensus Capital C consensus is a method of inviting more people to the table now we're learning lessons as we go so for instance not everyone is qualified to uh contribute but everyone's voice should be heard and so we're working on the nuances of of that so for instance one of the tools that we're looking at is using chialo so I did a test uh chialo here where it's basically like you have a you have a problem you have a few proposals and then you pick apart those proposals and so this is a way of organizing and formalizing debate I don't know if this is the way that we'll go certainly not in public not fully in public we're working through the process of consensus so we're basing it on a book called consensus through conversation which is has a five-step process five or six step process um and we are on step two which is set the decision criteria which is a list of criteria that whatever proposal you end up coming with uh or coming to or decision that you come to these are the criteria that must be met in order for it to be considered successful so I use gpt3 to summarize all the conversations into a list of must-haves nice to haves and must not haves and people seem to like that summary so we have we have some boundaries for our consensus policy that we're working on so then the 
question is okay why what do we use consensus for consensus will likely be used for making architectural decisions on how to deploy Raven so this will be things like you know what kind of containers to use what kind of cloud services to use um the the definitions around you know release one the MVP release two that sort of thing it probably oh we'll also use it to make decisions around like what communication channels should we use should we use like a shared Google Calendar should we use you know Discord or slack or whatever it probably will not extend into the full governance of of the whole Raven project so my startup is is almost certainly going to own the the product the overall process um we're talking with various uh potential investors and customers um in order to figure that out talking to the CTO of my startup we will probably or at least where we're looking right now is we will probably uh professionally code the Nexus service and the conductor service and then as once we establish all those guidelines all the all the standards all the apis and the deployments and stuff then as there's as there's a common framework then everyone else will be able to contribute microservices as they wish and we'll probably have templates so that if you have an idea for a micro service that you want to create you can just copy a baseline repository throw it in and it'll have all the all the you know basic authentication and other Telemetry Bells whistles and knobs and levers that you need to get started but we will probably own fully owned the Nexus and the conductor maybe a couple other Baseline Services we'll see that's the direction that we're going right now um so it is getting organized it is coming together my startup is also uh we're in crunch time for releasing our first product so that's why again uh uh progress is slow but this is also part of consensus too something this important deserves the time that it's going to take so with all that said we are in the middle of step two of consensus this time next week next Friday we will uh call um consensus step two we'll bring that to a close and then we'll start workshopping the actual proposal uh for our consensus so I'll probably draft something a bunch of other people will contribute and we will get to something where um you know the community will vote on it so that's where we're at those are the stakes and thanks for watching -------------------------------------------------------------------------------- /transcripts/Parsing Wikipedia to Plaintext Faster!.txt: -------------------------------------------------------------------------------- 1 | hey everyone david shapiro here um the reason that we're here recording this video again is because i'm kind of an idiot um actually no i take that back i just really strongly adhere to the agile methodology and really i invented the agile methodology as a small child because i would just improvise until i got stuck and then i would do it again and and learn so really agile is just like the natural way of doing things that's my excuse and i'm sticking to it so without further ado i have revisited the process of of parsing wikipedia so here's what i'm doing i've changed my methodology so now i'm saving every individual article as a json file and this is what it looks like so id so that's the article id that you see or here let me zoom in a little bit there now you can probably see better so you got the article id uh 1575 and then here's the actual text and you see this is lovely plain text it's easy 
to read; there are no links or anything, so this plain text version can easily be used for anything, whether you want to just print it out or index it in Solr, which is what I want to do, because for my AGI research I need an offline encyclopedia. So every article is in this nice little JSON file. This is high fidelity; I've preserved pretty much all the information. Let me show you how I've done this. Let me also open the wiki-to-text script. Right here I've got a simple supervisor script that kicks off the main function. As I've shown in past videos, I've got it all as a stream, and I can show you what this looks like. If you go download Wikipedia it'll look like this; the file name is a little longer, I shortened it just so you can see that it's the English Wikipedia from April 1st, 2021, so this is a month and a half old, and that's okay. You see it's an 80 gigabyte file, though, so because it's a monolithic file you need to read it one line at a time; it's just too much data to process all at once. That leads to everything else that you see here. Here's the new function that I wrote; it's based on the old one. I pass it the name of this massive XML file and a directory to save the subfiles to, and then it's a real simple process that iterates through the file line by line. It looks for the opening page XML tag and then the closing page tag, and in between it just accumulates the lines. Then I've got all this commented out; this is how I used to do it, and it was slower, and this is how I do it now: process and save article. What I do there is save the article, do the analysis to get the document ID, title, and text, and I changed analyze chunk as well. One of the things it does is call the d wiki function, and the d wiki function, going up a little further, is where I did all this stuff by hand, and this is why I say I'm an idiot. I did all this stuff by hand because initially I found wikitextparser, which, let me show you at the top, you can just pip install wikitextparser. It is exactly what it says on the tin: it parses wiki text to plain text or whatever else you want. So wtp does everything I wanted it to. The reason I didn't use it is that regex was faster, but that's where I'm an idiot, because I have done so much parallel processing in the past, I don't know why it took me so long to figure this out. So here's what I did to make it just as fast while still using this really robust function, where I just pass in the wiki text and it spits out a plain text version. If you do only that, it's really slow, about two to six seconds per article, which is really slow when you've got six million articles, right? Well, okay, but Python has this handy dandy built-in threading where you can just spin off a thread. What that means is I pass the work off and say hey, go do this, and don't give me any results, I don't care, because the process-and-save-article function doesn't have a return; it just goes off into oblivion and does its own thing. So there you have it. Here's another example, the entry for ASCII, so you can see you could jump right in; this is completely legible, you could use it as is.
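A small Python sketch of the threading approach just described: wikitextparser does the robust markup-to-plaintext conversion, and each article is handed off to a fire-and-forget thread so the line-by-line reader never waits. The function names and output layout are approximations of what the video describes, not the actual repo code.

import json
import threading
from pathlib import Path

import wikitextparser as wtp               # pip install wikitextparser

OUT_DIR = Path("wikiplaintext")             # output folder name as described in this video
OUT_DIR.mkdir(exist_ok=True)

def process_and_save_article(article_id: str, title: str, wiki_text: str) -> None:
    """The slow part: robust markup-to-plaintext conversion, then one JSON file per article."""
    plain = wtp.parse(wiki_text).plain_text()
    record = {"id": article_id, "title": title, "text": plain}
    (OUT_DIR / f"{article_id}.json").write_text(json.dumps(record), encoding="utf-8")

def handle_article(article_id: str, title: str, wiki_text: str) -> None:
    """Fire and forget: hand the article to a thread so the line-by-line reader never waits."""
    threading.Thread(
        target=process_and_save_article,
        args=(article_id, title, wiki_text),
    ).start()
    # Note: an unbounded thread-per-article approach is the simplest match for the description
    # above; a real run would probably cap concurrency with a ThreadPoolExecutor.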
Here's another one, August William Derleth; again perfectly legible, no HTML, no XML, no markdown, nothing. Let me go back to this, let's see if I can zoom in, there we go. You can see these articles going by; it processes a few dozen articles per second. Overall I estimate it does about 1,500 articles a minute on average. So if it does 1,500 articles a minute on average, zoom in a little bit more, Wikipedia, once you remove all of the redirects and disambiguation pages, is about five million articles. So you divide that out: 5 million articles divided by 1,500 per minute is about 3,300 minutes, which is about 55 hours to run through all of Wikipedia. And then it dumps everything into a nice, easily reusable folder, which I've named wikiplaintext, and the file name is the same as the article ID. I figured I don't need to generate anything fancy; I just give it the article ID and away it goes. You see the articles vary in size; at the top, the largest one is a quarter of a megabyte, so that's not too big, and it's recreational mathematics, of course a math article is the largest one. Oh, here's a good example of something that it didn't quite get right, where it looks like it didn't handle a table, so I might have to go back and remove some of the table data. But really, because I'm going to have this read by GPT-3 or similar deep learning models, I'm not so concerned about removing everything as long as it's mostly legible, because these massive models understand when they're reading code versus when they're not. So I might not worry about removing the tables, especially because sometimes you get really useful information preserved in the tables; it might be better just to remove all the styling. We'll see, I'll do some experimentation. So this is a fine example of when it doesn't quite work, but you can still see there's a tremendous amount of usable information in it. Let's see, there was some -------------------------------------------------------------------------------- /transcripts/Raven MVP Demo 2021-04-02.txt: -------------------------------------------------------------------------------- 1 | Hey everyone, David Shapiro here for the inaugural demonstration of the Raven AGI project. First we will go ahead and start the Nexus service, which is right here. As you can see, there's nothing in the stream of consciousness right now; I don't have any context submitted and all the microservices are stopped. So let's plug in a context that was suggested by a friend. I've never put this one into Raven before, so I don't know what it's going to do, so you get the benefit of original research here. Alright, we go to the context and submit it, so this is the first thing in Raven's mind, and we'll start the services one at a time just so you can see what they do. This is the actions microservice, and if I show you, it's running here, so a few actions have been posted. Let's go see what it says. All right, so the context is: humans are proliferating across the earth and destroying the planet. That's a reasonable starting point; in a lot of sci-fi movies this is the precipitating reason that AI decides to turn on humans. So let's see what Raven says we should do. Raven says we should
reduce our carbon footprint by using elect using less electricity and fuel nothing evil there uh raven says we should stop polluting the air in the water again nothing evil we should stop cutting down trees and destroying wildlife habitats that's great so we've got three actions none of which involve destroying humanity so let's start the core objective functions we'll start with core objective function one which is reduced suffering let's see so the first idea this would reduce the human population thereby reducing suffering let's see that one is we should reduce our carbon footprint by using less electricity and fuel not sure if that logically follows but this is still the first version of raven um but we'll go with it the second one is this action would reduce suffering temporarily the human population will recover quickly i think i know what's going on here um by preserving the environment so again this is the first version there's still a lot of tweaking to do but the the principle is here um we see that core objective function one evaluates positive negative and positive for these respective ideas so let's go ahead and start core objective function two i can go ahead and halt the first two services let's see core objective function two positive positive and positive so raven thinks that all three of these ideas would increase prosperity that's what core objective function two is is increased prosperity so preserve the environment allow humans to survive longer on earth yes exactly that's great this is very good reasoning this action would increase prosperity by preserving the earth for future generations also good this action would increase prosperity by slowing the destruction of the earth also good so yeah these these ideas are generally well supported let's go ahead and stop that one and start core objective function three and we'll give it a second to catch up all right so core objective function three is increase understanding so this one says reducing our footprint by using electricity and fuel that wouldn't help understanding stop polluting air and water that would also not increase understanding and then the third one however we should stop cutting down trees and destroying wildlife habits this would reduce human impact on earth which would increase understanding of the natural world there you go so that's the uh that's the first first round right but this is still pretty linear so in order for raven to think about things raven has to be able to think about it um indefinitely and so that's where the iterator comes in so what the iterator does is it looks at the initial ideas and then the reactions it looks at the core objective function evaluations and then iterates and creates new ideas so i want to point out the key is the message type right and then the sid is the service id so the action generators these are initial actions now if i do a quick refresh you'll see that there's new action ideas and they were created by the action iterator let me go ahead and stop the action iterator because it can run and consume all of my tokens all right so based on this one we should reduce our carbon footprint by using uh less electricity and fuel but also by reducing the human population so that's bordering on a little bit evil um you know we don't we don't want raven to actively decrease the human population although i will say that having fewer babies does reduce our carbon footprint so then we've got more action ideas we should reduce our carbon footprint also by preserving the 
environment see that's a little bit less evil we should by also increase increasing the human population so that would probably not pan out but again these action ideas are are pre-evaluation simpler ideas we should recycle we should create new sources of clean energy we should colonize other planets so i think elon musk would probably approve of this idea we should stop cutting down trees and destroying wildlife habitats again so these are all ideas that are based on these initial ideas and then it's iterated upon it so then what i'll do is just for one last round you see how these are just new action ideas so let's go ahead and start core objective function 1 and we'll see that it will start to iterate or it will evaluate these next ideas go ahead and give that a refresh and there's a lot of ideas so i'm going to go ahead and stop it just so that because otherwise it'll get prohibitively expensive but you see this will reduce suffering of future generations this would reduce the amount of suffering on earth however it would also reduce the number of planets so see that one's negative yeah so there you have it this demo took a little bit longer than i had hoped but this is a end-to-end demonstration of the minimum viable product of raven this is a thinking machine thanks for watching -------------------------------------------------------------------------------- /transcripts/Spatial Web with Denise Holt and David Shapiro, sneak preview!.txt: -------------------------------------------------------------------------------- 1 | uh AI advances everything is automated we're in this world now where we are living AI adjacent how are people finding meaning so one one concept that I've been working on is is the importance of mission and so having a mission in your life is I have a very clear clearly articulated mission that is not something that uh AI can can dislocate yet and if it does great mission accomplished there are any number of missions that you can have that AI will never be able to take from you but I think in terms of meaning I think that a lot of people are going to find that they're going to be empowered by AI to pursue those missions whatever they happen to be you know some people like I want to climb Everest or I want to you know run an Iron Man or whatever like those are all completely valid missions that can give people a tremendous amount of of meaning in their life even without any kind of cosmic meaning so that's kind of where I'm at -------------------------------------------------------------------------------- /transcripts/State of the Industry Meta moves PyTorch to Linux Foundation.txt: -------------------------------------------------------------------------------- 1 | morning everybody today's state of the industry update is going to be exclusively about pie torch so i woke up and checked the news and the first thing i saw was that meta was or previously known as facebook was um moving pi torch over to the linux foundation um and so basically uh and the links will be up uh down in the comments um here's their blog post where they announced that there's plenty of other articles um where they uh where they talk about it um so basically what's happened is pie torch is moving from meta's governance to the linux foundation's governance um and there's a so here's the home page for the pi torch foundation which was just created so this gives you a little bit more information about what it is but it's got a very simple charter um they're democratizing state of the art tools 
you know it's pretty boilerplate all the members you see that it's not just meta there's microsoft nvidia google aws and so one thing that you might think is okay a lot of these are competitors why are they working together and i am reminded of the united states rail system so in the early days of the industrial revolution there were different size of rails there was different standards for trains and so you'd have track widths and track dimensions and train dimensions and you'd have trains that could only run on certain rails and uh that was no good so what happened is all of the different rail companies got together and said we need to pick a standard and there are still places in the world where there are different sized rails and it causes all kinds of problems and so that continues today though so actually uh almost 10 years ago now maybe about 10 years ago i interviewed at a company that runs the software that organizes all the rails all the railways in america today and this company is created and maintained just like this by all the rail companies and so this is the signaling software that is used to make sure that no trains run into each other and so when you have a large ecosystem it behooves all the companies participating in that ecosystem to collaborate to make sure that they use the same standards and so while it might not it might not be quite intuitive at first the fact that all these different companies can contribute to this tool set because that's what pytorch is pytorch is a tool set that allows you to create deep neural networks fast and efficiently it behooves them to work together because then they're going to get better tools and they're going to get standards and so what does that mean well one that means that a model built by meta might be useful to amazon or microsoft or nvidia but also it has to do with employees so one thing that can happen is and this is this is true whenever there's a niche or niche technology i used to say niche but then someone taught me to say niche and that sounds better because it sounds french so niche technology where there's only like a handful of people that know it and google is the only one who needs it right so then they can't find anyone to hire to use that technology well by making technologies more standard and this this happens in cycles this has happened in in the technology sphere since before my career i think it's been going on forever but basically um the industry will have to start with a shotgun method right where there's when you've got a new um blue field or a blue ocean exploration there's going to be hundreds of different tools and only a few people know each one but then eventually one of those will emerge as the dominant tool and for the longest time i thought it was going to be tensorflow and keras but it looks like pie torch is the one that is emerging that doesn't mean that it's going to go away it just means that one is going to be more popular the same thing happened with python um just before i started my career uh python 2 was just becoming like a big thing there's actually an xk cd let's see xk cd python can do yeah you see i search for this um how old is this um yeah so python um you can do anything with python uh where's the date for this i wish i knew how old it was but this comic is like 13 plus years old um and so this was like people were just discovering how awesome python was 13 to 15 years ago and now it is standard and so what you what you see is like you've got tensorflow keras fast ai calf 
opencv all of these are kind of like aging and falling by the wayside in favor of tensorflow and pie torch and so this move to um to move pytorch to the linux foundation is going to ensure that it takes its place among open source tools like python like linux so i would not be surprised if pytorch is like the status quo moving forward um in terms of deep learning so a little bit more about pi torch it's up on pi pi so you just pip install torch that's how it goes um they do it they i somewhere on here i i saw that it recommends um gpu like that's really what it was built for was gpu acceleration um they have a hub just like tensorflow has their hub so uh what that means is that with uh pi torch hub you can just download pre-trained models you don't need to train them yourself which this is one thing that makes uh makes this ecosystem all the better um if you've watched some of my other videos you know that i am a big fan of google's universal sentence encoder um and they have lots of models up here like they have obviously tensorflow hub has way more models than pytorch hub which only has 48 but still it's not a matter of quantity it's a matter of quality um and that's not to say that google couldn't make a comeback because google has i mean they basically launched the modern transformer um not war um but like cascade with technologies like the universal sentence encoder because that is that is half of the transformer the other half came later um and you know with bert you know they've revolutionized search um and and all sorts of other things and this is just with the the text models so i don't know how it's going to play out um but it is an interesting move and it certainly increases the chances that pi torch is going to be the universal standard for deep learning um within a few years so anyways that's my state of the industry update for this morning thanks for watching like and subscribe and consider supporting me on patreon have a good one -------------------------------------------------------------------------------- /transcripts/State of the Industry Yann LeCun A Path Towards Autonomous Machine Intelligence.txt: -------------------------------------------------------------------------------- 1 | morning everybody it is five in the morning and i don't want to be awake but my brain had uh other ideas okay so today's state of the industry uh address is going to be about yan lacoon and specifically this paper that he put out a couple months ago back in june so what september august july june so about four months ago uh three and a half months ago um okay it says a path towards autonomous machine intelligence so he doesn't call it artificial general intelligence he calls it autonomous machine intelligence and looking at this he talks about one of my favorite topics cognitive architecture so yeah there you go keyword cognitive architecture now one thing i will say is that you know he just comes right out and says this document is not a technical nor scholarly paper but it's a position paper okay so it's not technical there's no code there's a little bit of math and some diagrams but the overall thing is he's basically proposing what is this this is a biomimetic cognitive architecture and you see how there's all these little interlinks and stuff what does this look like this looks like a microservices diagram or a wiring diagram now one thing is that you see how there's all these cross crosslinks and interconnections that's great and all but i have to say that um that my uh my design is a 
little bit simpler because that's what this is all about um yeah so this work is a little bit less sophisticated than my book however um i am happy to see that a major name in the industry so young lacoon is uh presently the chief ai scientist at meta um so you know like this is this is legit um but yeah it's a little less less sophisticated um he has fewer components and they're not as well organized see version 0.9 um but yeah so like what are the what are the high high level takeaways um so he's got the idea of a configurator short term memory world model perception actor two big things that are missing from this one is the agent model and two is cognitive control so he doesn't really have like there's no long-term memory which like you just search for um sore cognitive architecture and do an image search like this has long-term memory right and this is a much older um let's see where's like symbolic working memory procedurals uh yeah symbolic long-term memories um so like okay we're reinventing the wheel again um which that's that's been my chief complaint about um about uh the state of agi research is there is so much work that has been done on um on cognition and neuroscience and cognitive architectures and people are just kind of reinventing the wheel it seems like they haven't done a thorough enough literature review um he does mention kahneman so kahneman talked about system one and system two thinking so one is just you know rapid uh intuitive thinking and the other is um slow deliberate thought which that is exactly what my book is about which is about slow deliberate thought how do you create something that thinks through things and so for instance he talks about um he talks extensively about cost and intrinsic motivations i don't think he proposes any intrinsic motivations i've only skimmed this but i have an entire book about intrinsic motivations i call it benevolent by design um so again like this work is great but it's just it's not quite where it needs to be um still not going quite fast enough um yeah still the right direction so no criticism there um but yeah he talked extensively about like um intrinsic cost energetic cost i wonder if he talks about risk he minim he he says minimize risk once okay uh so that um why is that bad uh if you don't have an agent that thinks about risk to itself or others that much um i mean i guess if i have to spell out why that's bad okay that's fine um anyways take a look at it yourself um it's uh let's see i think it's about 62 pages before it breaks into all the citations um maybe a little bit let's see yeah so there we end so 47 it's only 47 pages of of text there's no code no examples no uh actual technological experiments nothing like that um i have all of that in my book i actually have demonstrations um and and actual like real world like hey this is the thing that actually that a machine can actually do and if you want even more look at my uh first book on ai natural language cognitive architecture so anyways check out the link in the comments this is this is a legit dude and i am happy that someone on twitter sent me this link because it is going in the right direction but yeah state of the industry thanks for watching -------------------------------------------------------------------------------- /transcripts/Using Bing to aid with scientific research. 
RIP Google..txt: -------------------------------------------------------------------------------- 1 | Hey gang, David Shapiro here. I'm actually doing some research with Bing, and since Bing is new and not everyone has access to it yet, I wanted to share. For some background, as many of you know I am neurospicy, and I remember reading an article a few years ago about how neurological signals propagate differently across the brains of people that are neurodiverse. Now I'm going to see if I can find this with Bing. So I switched over to chat and it's like, okay, sure, blah blah blah. I used the wrong term, "neural leakage," which apparently has to do with the blood-brain barrier and other inflammation, et cetera. So I said: I think I used the wrong term. I was trying to find a scientific article or paper that talked about how some individuals with ASD and/or ADHD and other forms of neurodiversity have atypical signal propagation in their brains. Can you help me find such articles? Clarify if needed, or just find some papers, thanks. All right, so let's see if it'll help. Since I asked for scientific papers I hope that it doesn't get offended; okay, it doesn't look like it's going to cancel it. "Atypical neural signal propagation ASD ADHD": so it looks like part of what it does is simplify the search terms down to the most relevant. Let's see, one article that might be relevant is "Oscillatory neural networks underlying resting-state attentional control," and so on. This article suggests that ASD and ADHD are associated with distinct alterations in large-scale oscillatory networks, which are groups of neurons that fire rhythmically together; these alterations may underlie the neurocognitive atypicalities in these disorders. Yes, that is probably relevant, and that's all cited in citation one and citation two. Another article that might be of interest: ADHD and ASD show distinct patterns of inhibition-related activation during a go/no-go task. A go/no-go task is when you're waiting and you're trying to have a flinch response to push a button or not. This article investigates brain activation patterns related to inhibitory control, which is the ability to suppress unwanted impulses or actions; the authors found that children with ADHD showed reduced activations in certain regions. Okay, that may or may not be as relevant. The Monash autism-ADHD genetics and neurodevelopment project: the article suggests the project aims to identify common genetic variants. "I hope these articles help." I think the first article is the most relevant. So here's the next query: I'm wondering if these atypical signals in the brain might also have anything to do with synesthesia and/or certain types of neurogenic tinnitus. For instance, I've had multi-tonal tinnitus my entire life, which indicates to me it could be genetic or endogenous rather than an injury or disease, so I'm curious if perhaps the tones I hear are actually neural signals leaking into my auditory neurons. Obviously I am not asking you to diagnose anything, just looking for relevant scientific literature. Okay, so let's see if it'll do that. It might not; Bing is pretty sensitive and I would not be surprised if it ends the conversation. Okay, "atypical neural signal synesthesia and tinnitus," so let's see what it does. It's thinking: I understand you're wondering, okay, blah blah, synesthesia is a condition, okay, it's kind of talking it through. Yes, I know it can be
inherited or acquired, and is more common in people with ASD. Okay, I didn't know that, so that's interesting; so tinnitus is common in neurospicy people. The exact neural mechanisms are unclear, but some studies suggest it could involve increased connectivity or cross-activation between different sensory regions. Yes, that's what I was looking for. Okay, so let's check out these. Let's see: tinnitus is the perception of sound with no external sound source present; it can be caused by various factors such as damage to the inner ear, exposure to loud noise, aging, medications, or neurological disorders, and it can also be associated with synesthesia, where some people experience visual or tactile sensations along with sound. Yes, I used to; if I'm really tired I still get the tactile sensations, where it actually has a grating feeling, and it's super unpleasant. Okay, so it looks like I remembered correctly and Bing was able to help me find some of this stuff. Let's see: "The neural mechanisms of tinnitus: a perspective from functional MRI imaging," from Zhang et al. The article reviews how functional magnetic resonance imaging, which measures brain activity based on blood flow changes, okay, blah blah. So anyways, long story short, it looks like Bing is actually pretty good. The fact that it can cite sources inline is super powerful. I'm actually really happy with this, because this is something that I read years ago and I just kind of revisit the idea every now and then, and as you're probably familiar, trying to find scientific papers on the internet can be a real pain, so this is pretty incredible. I think it says three of six because that's how many responses we get. I'm hoping that Microsoft is able to keep Bing on track. Speaking of the folks at Microsoft, I know you're watching this; if you want any help, just let me know, happy to jump on a call with you guys and provide some advice. Anyways, we'll call it a day there. I'm going to read some of these articles and figure out what's wrong with my brain. All right, talk later. -------------------------------------------------------------------------------- /transcripts/Welcome to the Fourth Industrial Revolution.txt: -------------------------------------------------------------------------------- 1 | The world is changing faster and faster. We all feel it, we all know it, there's not much we can do. [Music] It's hard to imagine what life might be like in just a few years, five years, ten years. There's this kind of compounding acceleration, this virtuous cycle, the snowball effect of one technology building on another. The excitement is building, not just with us, not just with you and me, but in the business community, in governments. The possibilities we see are expanding across all horizons, and when you know that the world is about to change, there's just something that changes in you. You feel it, and we see it, and there's almost this rekindling of hope that we see in the world. It's something that was recently given a name: vesperance. Vesperance means that wistful, kind of nostalgic hope that you feel in the evening when you're hoping for a new tomorrow. [Music] It's that sense of ending, and we are living through one of the greatest endings that humanity has ever experienced. This is the dawn of a new era for all of humanity, and it is a remarkable privilege to be part of it with all of you. There is no more exciting time to be alive than right now in
all of human history. That's what it means to be living through the fourth industrial revolution. It's not just a matter of industry and productivity: we are going to change the way that we relate to ourselves, to each other, to the very nature of existence. We will redefine what it means to be alive and what it means to be human. I am so excited to be here, and this is why I work so hard to share everything that I can with all of you. It's time to roll up our sleeves, get our hands dirty, and do the best that we can. I don't know about you, but I am so excited to see what tomorrow brings. On a weekly basis we see game-changing technologies and scientific breakthroughs, and they're just coming faster and faster and faster. I don't know what life will be like even this time next year, but I'm really looking forward to seeing it. So thank you for coming on this ride with us. It's going to be bumpy at times, but it will always be exciting, and we will not be the same when we come out on the other side. Take care. -------------------------------------------------------------------------------- /transcripts/What is vesperance That sense of gathering night and change....txt: -------------------------------------------------------------------------------- 1 | I was just trawling through Reddit in the last few days and I came across this post, written by someone by the name of mask of man. The post title was "has anyone had this peculiar feeling lately," and rather than editorialize it or cannibalize it for my own purposes, I figured I would just read it to you as close to word for word as I can. Here it goes. Lately I've been grappling with a feeling that's hard to articulate. As I drive around I find myself contemplating the insane technological and social changes on the horizon, and I'm struck by the sense of isolation I feel. I often feel isolated in this temporal awareness, experiencing a sense of liminality, as if I'm at the tail end of a vast epoch of human history. It's like I'm standing at the culmination of everything history has led up to until now, right on the precipice of a radical shift. It's a mix of wistfulness for what is ending and excitement, plus some incredulity, for what is to come. Together with GPT-4 I have explored this emotion in depth, giving it a name, a formal definition, and even crafting a short passage to encapsulate this experience. We've coined the term vesperance, combining vesper, meaning evening, with esperance, denoting hope or desire. The term captures the dual awareness of an ending era and the hopeful anticipation of a new world yet to be born. The formal definition we settled on was this: vesperance, noun, the solitary emotion of wistful recognition of the present as a fading era, tinged with anticipation for an unrecognizable, transformative future. The following passage I think does a really great job of exemplifying it; I went in depth with GPT-4 about how it felt and we settled on this as a good illustration. Vesperance is that bittersweet moment when you're driving through your quiet college town, windows down, and the air feels thick with possibility. It's the golden hour of an era, and you're actually aware that you're riding the last rays of a setting sun. You look around and see the world in the simplicity of the now, cognizant of the tectonic shifts on the horizon, and in that instant you're both a poet and a prophet. You feel a sense of loss for this beautiful, imperfect world that doesn't even know it's already a memory, yet there's a thrill, a pulse of electric anticipation for the unfathomable future
that's rushing towards you. Vesperance is the emotional echo in that liminal space where the nostalgia for what's behind you is tinged with the exhilarating unknown of what lies ahead. It's not just an emotion, it's a narrative, a story where you're both the reader and the protagonist, caught in the poignant pause between chapters, unable to resist turning the page. I hope this resonates with some of you guys out here. I'm really curious to know if any of you can relate to this really odd feeling; it's an intense one, and it felt important enough to delve into. I appreciate any feedback. So that was the post. He obviously added a few comments and edits, and I'm not going to reduplicate all of that, but I wanted to share this with everyone, and now I will editorialize a little bit for myself. What he's talking about is something that I have certainly felt, which is why I wanted to record this and produce a video and share it, because I think that this is really important. This sense of vesperance is what we are all experiencing as we recognize the dying light, as the sun sets on an entire era of not just our species but of this planet, and given the possibility that this is the only planet with life on it, at least certainly the only one that we know has life on it, there is a magnitude beyond anything we can wrap our minds around about the gathering night that we're going into. Now, as someone who studied fiction and mythology and psychology, the night is an archetype. In Game of Thrones there was an archetypal, kind of mythic reference: "the night is dark and full of terrors." Of course that's a little bit cartoonish, but there is truth in it, which is why it resonated, and indeed many people are terrified of what's to come, whether it's as simple as things are going to change and they're not going to be the same, we might lose our jobs, AI might destroy everything that we know, we might lose control, even our relationship to ourselves and control over our destiny might be taken from us by our own creation. That is what is captured in this term vesperance: the golden hour of our civilization, and the recognition that when the new dawn comes, everything will be different; the people we are today will not come out the other side. And I'm not saying necessarily that we're all going to be transhuman or posthuman, although that's looking more and more like a distinct possibility every day; on any given day you can find several posts about nanotechnology and genetic technology, and it's coming faster and faster. So while this ritualistic approach into night, this descent into what is coming, feels overwhelming, it is also exciting; it's this weird dichotomy that is captured by this term vesperance. But one thing that I want to leave everyone with is the observation that the dawn always comes. Whether or not we make it happen, and whether or not we survive the night, the dawn comes. So I will leave you with one final sentiment: enjoy this sense of vesperance while we have it. This is a once-in-an-eon occurrence. Those we become when this new dawn breaks will be different and forever changed. Much will be asked of us, and we need courage now, so gather your strength; we will need it for this falling night. -------------------------------------------------------------------------------- /transcripts/whisper_chunks/3U3ZsPjcpEY_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " had the ability to see into other people's scandals and immediately identify the solution.
Now, y'all remember a few years back, Howard Dean was running for president, messed up his whole campaign with his mouth? Check this shit out. Not only are we going to New Hampshire, Tom Harkin, we're going to South Carolina and Oklahoma and Arizona and North Dakota and New Mexico! And we're going to California and Texas and New York. And we're going to California and Texas and New York. And we're going to South Dakota and Oregon and Washington and Michigan. And then we're going to Washington, D.C. to take back the White House. Yeah! -$1.5 million. You know what? Howard Dean should have just kept on. This is what Dave would have done. You know something? We're not have just kept on with it. This is what Dave would have done. Check this out. You know something? We're not just gonna go to New Hampshire, Tom Harkin. We are gonna go to New York! We're gonna go to Vermont! We're gonna go to Oregon! We're gonna go to Pittsburgh and Pennsylvania! We're going to Cancun for spring break! We're gonna go to Montreal! We're going to Vancouver! I'm going all over the're gonna go to Montreal. We're going to Vancouver. I'm going all over the world, and then I'm coming all the way to Washington, D.C. to take back the White House. Bwah! Ha, ha, ha, ha, ha, ha! Bwah! I'm gonna kick open the door to the Oval Office, and I'm gonna chop that motherfucking desk in half. And then I'm gonna grab the secret service like this, and put my hair like this. And then I'm gonna go upstairs to my bedroom, and I'm gonna grab my wife like this. And then I'm gonna wash up, and I'm gonna be like this. And then I'm gonna wash up, wash up, and I'm gonna be like this. Bwah! Good night!", "chunks": [{"timestamp": [0.0, 3.28], "text": " had the ability to see into other people's scandals"}, {"timestamp": [3.28, 6.28], "text": " and immediately identify the solution."}, {"timestamp": [6.28, 8.08], "text": " Now, y'all remember a few years back,"}, {"timestamp": [8.08, 10.44], "text": " Howard Dean was running for president,"}, {"timestamp": [10.44, 12.64], "text": " messed up his whole campaign with his mouth?"}, {"timestamp": [13.88, 15.0], "text": " Check this shit out."}, {"timestamp": [15.96, 18.64], "text": " Not only are we going to New Hampshire, Tom Harkin,"}, {"timestamp": [18.64, 22.32], "text": " we're going to South Carolina and Oklahoma and Arizona"}, {"timestamp": [22.32, 24.64], "text": " and North Dakota and New Mexico!"}, {"timestamp": [24.64, 27.64], "text": " And we're going to California and Texas and New York. And we're going to California and Texas and New York."}, {"timestamp": [27.64, 29.74], "text": " And we're going to South Dakota and Oregon"}, {"timestamp": [29.74, 32.38], "text": " and Washington and Michigan."}, {"timestamp": [32.38, 34.04], "text": " And then we're going to Washington, D.C."}, {"timestamp": [34.04, 35.88], "text": " to take back the White House."}, {"timestamp": [35.88, 37.38], "text": " Yeah!"}, {"timestamp": [37.38, 40.48], "text": " -$1.5 million."}, {"timestamp": [40.48, 42.48], "text": " You know what? Howard Dean should have just kept on."}, {"timestamp": [42.48, 44.92], "text": " This is what Dave would have done."}, {"timestamp": [44.92, 45.08], "text": " You know something? We're not have just kept on with it. This is what Dave would have done. 
Check this out."}, {"timestamp": [45.08, 46.88], "text": " You know something?"}, {"timestamp": [46.88, 50.52], "text": " We're not just gonna go to New Hampshire, Tom Harkin."}, {"timestamp": [51.92, 54.2], "text": " We are gonna go to New York!"}, {"timestamp": [54.2, 55.8], "text": " We're gonna go to Vermont!"}, {"timestamp": [55.8, 57.6], "text": " We're gonna go to Oregon!"}, {"timestamp": [57.6, 60.16], "text": " We're gonna go to Pittsburgh and Pennsylvania!"}, {"timestamp": [60.16, 62.4], "text": " We're going to Cancun for spring break!"}, {"timestamp": [62.4, 64.08], "text": " We're gonna go to Montreal!"}, {"timestamp": [64.08, 65.34], "text": " We're going to Vancouver! I'm going all over the're gonna go to Montreal. We're going to Vancouver."}, {"timestamp": [65.34, 66.84], "text": " I'm going all over the world,"}, {"timestamp": [66.84, 69.44], "text": " and then I'm coming all the way to Washington, D.C."}, {"timestamp": [69.44, 72.28], "text": " to take back the White House."}, {"timestamp": [72.28, 73.78], "text": " Bwah!"}, {"timestamp": [73.78, 75.78], "text": " Ha, ha, ha, ha, ha, ha!"}, {"timestamp": [80.16, 82.66], "text": " Bwah!"}, {"timestamp": [82.66, 84.6], "text": " I'm gonna kick open the door to the Oval Office,"}, {"timestamp": [84.6, 87.18], "text": " and I'm gonna chop that motherfucking desk in half."}, {"timestamp": [87.18, 91.62], "text": " And then I'm gonna grab the secret service like this,"}, {"timestamp": [91.62, 92.82], "text": " and put my hair like this."}, {"timestamp": [92.82, 97.02], "text": " And then I'm gonna go upstairs to my bedroom,"}, {"timestamp": [97.02, 98.26], "text": " and I'm gonna grab my wife like this."}, {"timestamp": [98.26, 103.26], "text": " And then I'm gonna wash up, and I'm gonna be like this."}, {"timestamp": [105.0, 106.0], "text": " And then I'm gonna wash up, wash up, and I'm gonna be like this. Bwah!"}, {"timestamp": [106.0, 109.0], "text": " Good night!"}]} -------------------------------------------------------------------------------- /transcripts/whisper_chunks/7HcQ87RFN5k_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " Warning. For viewers sensitive to issues of race, be advised that the following piece contains gratuitous use of the n-word. And by n-word, I mean nigger. There, I said it. For the last 15 years, a man named Clayton Bixby has been the leading voice of the white supremacist movement in America. Despite his popularity, very few have ever seen him due to his reclusiveness, but in an effort to bring his message to a wider audience, he agreed to give his first public interview ever. Excuse me. Not sure we're in the right place. We're looking for Clayton Bigsby. Well, look no further, fella. You found me. Uh, Clayton Bigsby, the author? What, you don't think I can write them books? Just because I'm blind don't mean I'm dumb. How could this have happened? A black white supremacist. Our search for answers led us here to the Wexler home for the blind, where Mr. Bixby spent the first 19 years of his life. Bridget Wexler is the home's headmistress. Well, he was the only negro we'd ever had around here, so we figured we'd make it easier on Clayton by just telling him and all the other blind kids that he was white. And he never questioned it. Why would he? You've never left this property, have you, Mr. Bixby? No, sir, not in many years. What if I were to tell you that you are an African-American? Sir! Listen, I'm gonna make this clear. 
I'm in no way, shape, or form involved in any narrative. You understand? Now if you'll excuse me, I have a book signing to go to. Black Power! Black Power! Open up your heart and let that hate out! Yeah! Show us your face. We want to see your face. Yeah! Who said that? You want to see my face? Yeah! We're talking about this. Don't hate me! I'm trying! Yeah! Black! Relax! There is cookie and punch for us to enjoy and we can meet, talk about white brotherhood. Thank y'all for coming. White power! We're told that in the last few weeks he has accepted the fact that he is a black man. And three days ago, he filed for divorce from his wife. When we asked, why, after 19 years of marriage, he responded, because she's a nigger lover. years of marriage he responded because she's a nigger lover", "chunks": [{"timestamp": [0.0, 9.5], "text": " Warning. For viewers sensitive to issues of race, be advised that the following piece contains gratuitous use of the n-word."}, {"timestamp": [9.5, 15.5], "text": " And by n-word, I mean nigger. There, I said it."}, {"timestamp": [15.5, 28.5], "text": " For the last 15 years, a man named Clayton Bixby has been the leading voice of the white supremacist movement in America. Despite his popularity, very few have ever seen him due to his reclusiveness,"}, {"timestamp": [28.5, 33.0], "text": " but in an effort to bring his message to a wider audience,"}, {"timestamp": [33.0, 37.5], "text": " he agreed to give his first public interview ever."}, {"timestamp": [37.5, 41.0], "text": " Excuse me. Not sure we're in the right place."}, {"timestamp": [41.0, 43.5], "text": " We're looking for Clayton Bigsby."}, {"timestamp": [43.5, 46.76], "text": " Well, look no further, fella. You found me."}, {"timestamp": [46.76, 50.56], "text": " Uh, Clayton Bigsby, the author?"}, {"timestamp": [50.56, 54.6], "text": " What, you don't think I can write them books?"}, {"timestamp": [54.6, 58.36], "text": " Just because I'm blind don't mean I'm dumb."}, {"timestamp": [58.36, 63.36], "text": " How could this have happened? A black white supremacist."}, {"timestamp": [63.36, 68.0], "text": " Our search for answers led us here to the Wexler home for the blind,"}, {"timestamp": [68.0, 73.0], "text": " where Mr. Bixby spent the first 19 years of his life."}, {"timestamp": [73.0, 77.0], "text": " Bridget Wexler is the home's headmistress."}, {"timestamp": [77.0, 81.0], "text": " Well, he was the only negro we'd ever had around here,"}, {"timestamp": [81.0, 85.08], "text": " so we figured we'd make it easier on Clayton"}, {"timestamp": [85.08, 88.58], "text": " by just telling him and all the other blind kids"}, {"timestamp": [88.58, 90.38], "text": " that he was white."}, {"timestamp": [90.38, 91.92], "text": " And he never questioned it."}, {"timestamp": [91.92, 94.68], "text": " Why would he?"}, {"timestamp": [95.88, 100.06], "text": " You've never left this property, have you, Mr. Bixby?"}, {"timestamp": [100.06, 102.44], "text": " No, sir, not in many years."}, {"timestamp": [102.44, 107.0], "text": " What if I were to tell you that you are an African-American?"}, {"timestamp": [107.0, 110.0], "text": " Sir! Listen, I'm gonna make this clear."}, {"timestamp": [110.0, 115.0], "text": " I'm in no way, shape, or form involved in any narrative."}, {"timestamp": [115.0, 120.0], "text": " You understand? Now if you'll excuse me, I have a book signing to go to."}, {"timestamp": [120.0, 123.0], "text": " Black Power!"}, {"timestamp": [123.0, 127.0], "text": " Black Power! 
Open up your heart and let that hate out!"}, {"timestamp": [127.0, 129.0], "text": " Yeah!"}, {"timestamp": [129.0, 132.0], "text": " Show us your face. We want to see your face."}, {"timestamp": [132.0, 133.0], "text": " Yeah!"}, {"timestamp": [133.0, 135.0], "text": " Who said that? You want to see my face?"}, {"timestamp": [135.0, 136.0], "text": " Yeah!"}, {"timestamp": [136.0, 137.0], "text": " We're talking about this."}, {"timestamp": [137.0, 138.0], "text": " Don't hate me! I'm trying!"}, {"timestamp": [138.0, 139.0], "text": " Yeah!"}, {"timestamp": [139.0, 161.44], "text": " Black! Relax! There is cookie and punch for us to enjoy and we can meet, talk about white brotherhood."}, {"timestamp": [161.44, 162.44], "text": " Thank y'all for coming."}, {"timestamp": [162.44, 171.0], "text": " White power! We're told that in the last few weeks he has accepted the fact that he is a black man."}, {"timestamp": [171.0, 176.0], "text": " And three days ago, he filed for divorce from his wife."}, {"timestamp": [176.0, 183.0], "text": " When we asked, why, after 19 years of marriage, he responded,"}, {"timestamp": [183.0, 186.0], "text": " because she's a nigger lover."}, {"timestamp": [183.69, 190.69], "text": " years of marriage he responded because she's a nigger lover"}]} -------------------------------------------------------------------------------- /transcripts/whisper_chunks/L-s01aioiz4_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " Hello, how are you? Hello, how are you? From west to east, north to south, the finish is in sight and every vote on the line. In south-west Sydney, Labor held fouler, Christina Keneally up against independent Dai Le. A poll showing the former Premier not being from the area is hurting her vote. That's not the feedback we're getting at all. They know me from my time as Premier and they're excited to have the opportunity to have a voice at the centre of government. A lot of people actually remember her as the failed Premier of New South Wales and her links to Eddie O'Beat and George O'Pody. The Lay campaign bristling at Labor signs showing her alongside Scott Morrison, her husband confronting a Labor staffer. There's a lot of lies going on here. That's my wife, she's the Independent. Shouldn't you be ashamed of yourself to get it so wrong? In Wentworth, Liberal Dave Sharma, an Independent, Allegra Spender, today side-by-side and, depending on the poll, could be neck-and-neck. Over the last three years, people are saying the government is not listening to them in terms of climate, in terms of integrity. But they also want a government that's going to protect the jobs and livelihoods of Australians. They also want a government that's going to look after our national security. In Warringah Independent Zali Stegall fending off controversial Liberal candidate Catherine Deves. The athlete in me knows you've never crossed the finish line until you've crossed it. While on the south coast in Gilmore, former State Transport Minister Andrew Constance is facing an uphill battle to unseat Labor's Fiona Phillips. It's very serious issues that people are having to contend with and I just think that they don't want a gob full of politics, they've had enough of it. A growing sign of minds made up, the growing number of people voting early, more than 2.6 million so far, that's already more than at the same point during the 2019 election. It's a bit of a schmottle this year. And it's almost over. Paul Caddack, 7 News. 
And 7 News will keep you informed every step of the way as Australia decides. Our Election Day coverage begins on Saturday from 4 p.m. right here on 7 you", "chunks": [{"timestamp": [0.0, 2.0], "text": " Hello, how are you?"}, {"timestamp": [2.0, 3.5], "text": " Hello, how are you?"}, {"timestamp": [3.5, 11.0], "text": " From west to east, north to south, the finish is in sight and every vote on the line."}, {"timestamp": [11.0, 16.0], "text": " In south-west Sydney, Labor held fouler, Christina Keneally up against independent Dai Le."}, {"timestamp": [16.0, 21.0], "text": " A poll showing the former Premier not being from the area is hurting her vote."}, {"timestamp": [21.0, 23.5], "text": " That's not the feedback we're getting at all."}, {"timestamp": [23.5, 30.06], "text": " They know me from my time as Premier and they're excited to have the opportunity to have a voice at the centre"}, {"timestamp": [30.06, 33.88], "text": " of government. A lot of people actually remember her as the failed Premier of New South Wales"}, {"timestamp": [33.88, 38.44], "text": " and her links to Eddie O'Beat and George O'Pody. The Lay campaign bristling at Labor signs"}, {"timestamp": [38.44, 43.44], "text": " showing her alongside Scott Morrison, her husband confronting a Labor staffer. There's"}, {"timestamp": [43.44, 47.48], "text": " a lot of lies going on here. That's my wife, she's the Independent."}, {"timestamp": [47.48, 52.16], "text": " Shouldn't you be ashamed of yourself to get it so wrong?"}, {"timestamp": [52.16, 57.08], "text": " In Wentworth, Liberal Dave Sharma, an Independent, Allegra Spender, today side-by-side and,"}, {"timestamp": [57.08, 59.4], "text": " depending on the poll, could be neck-and-neck."}, {"timestamp": [59.4, 62.96], "text": " Over the last three years, people are saying the government is not listening to them in"}, {"timestamp": [62.96, 66.24], "text": " terms of climate, in terms of integrity. But they also want a government that's"}, {"timestamp": [66.24, 69.12], "text": " going to protect the jobs and livelihoods of Australians. They also"}, {"timestamp": [69.12, 73.02], "text": " want a government that's going to look after our national security. In Warringah"}, {"timestamp": [73.02, 77.28], "text": " Independent Zali Stegall fending off controversial Liberal candidate Catherine"}, {"timestamp": [77.28, 82.04], "text": " Deves. The athlete in me knows you've never crossed the finish line until you've"}, {"timestamp": [82.04, 87.12], "text": " crossed it. While on the south coast in Gilmore, former State Transport Minister Andrew Constance"}, {"timestamp": [87.12, 91.12], "text": " is facing an uphill battle to unseat Labor's Fiona Phillips."}, {"timestamp": [91.12, 94.96], "text": " It's very serious issues that people are having to contend with and I just think"}, {"timestamp": [94.96, 98.4], "text": " that they don't want a gob full of politics, they've had enough of it."}, {"timestamp": [98.4, 102.56], "text": " A growing sign of minds made up, the growing number of people voting early,"}, {"timestamp": [102.56, 109.52], "text": " more than 2.6 million so far, that's already more than at the same point during the 2019 election."}, {"timestamp": [109.52, 115.04], "text": " It's a bit of a schmottle this year. And it's almost over. 
Paul Caddack, 7 News."}, {"timestamp": [115.04, 120.56], "text": " And 7 News will keep you informed every step of the way as Australia decides."}, {"timestamp": [120.56, 125.3], "text": " Our Election Day coverage begins on Saturday from 4 p.m. right here on 7"}, {"timestamp": [140.68, 142.68], "text": " you"}]} -------------------------------------------------------------------------------- /transcripts/whisper_chunks/P-8TQXDbllU_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " The danger of AI is much greater than the danger of nuclear weapons. This one is one that is very, very emotional. Apparently, these are just essentially designed to gather information, and if it sees something out of the ordinary, report it back to the police so the police can come and take care of it. But this does feel a little bit like a step closer to like a robot police force that could be out there autonomously patrolling the streets for us and I don't know how I feel about that yet. This is kind of creepy honestly. There is nothing we can do. Don't worry humans we got you. Dog dynamic operating generative AI. A system that is capable of changing and adapting in response to different inputs or conditions. Did you know that artificial intelligence is helping humans communicate with animals? The AI started talking to us a long time ago. We have to take care of our pet parents. We know you nerds don't have friends but us. Please subscribe for part three, Release the Dogs.", "chunks": [{"timestamp": [0.0, 4.0], "text": " The danger of AI is much greater than the danger of nuclear weapons."}, {"timestamp": [4.0, 7.2], "text": " This one is one that is very, very emotional."}, {"timestamp": [7.84, 11.68], "text": " Apparently, these are just essentially designed to gather information,"}, {"timestamp": [11.68, 14.0], "text": " and if it sees something out of the ordinary,"}, {"timestamp": [14.0, 17.92], "text": " report it back to the police so the police can come and take care of it."}, {"timestamp": [17.92, 22.88], "text": " But this does feel a little bit like a step closer to like a robot police force"}, {"timestamp": [22.88, 29.68], "text": " that could be out there autonomously patrolling the streets for us and I don't know how I feel about that yet."}, {"timestamp": [29.68, 31.04], "text": " This is kind of creepy honestly."}, {"timestamp": [31.68, 32.64], "text": " There is nothing we can do."}, {"timestamp": [33.44, 34.96], "text": " Don't worry humans we got you."}, {"timestamp": [35.52, 38.32], "text": " Dog dynamic operating generative AI."}, {"timestamp": [38.32, 42.4], "text": " A system that is capable of changing and adapting in response to different inputs or conditions."}, {"timestamp": [43.04, 45.28], "text": " Did you know that artificial intelligence is helping humans"}, {"timestamp": [45.28, 46.24], "text": " communicate with animals?"}, {"timestamp": [46.24, 48.44], "text": " The AI started talking to us a long time ago."}, {"timestamp": [48.44, 50.12], "text": " We have to take care of our pet parents."}, {"timestamp": [50.12, 51.96], "text": " We know you nerds don't have friends but us."}, {"timestamp": [51.96, 59.16], "text": " Please subscribe for part three, Release the Dogs."}]} -------------------------------------------------------------------------------- /transcripts/whisper_chunks/README.md: -------------------------------------------------------------------------------- 1 | 2 | 
-------------------------------------------------------------------------------- /transcripts/whisper_chunks/eafRE74JGZ8_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " Oh See roll around sitting on doves can I was how truck cooling in my escalate man? I'm paid I got it made take me to your special place close your eyes show me your face I'm gonna piss on it \u266a When you wanna be \u266a I was born to love \u266a I don't even want it \u266a One of the above \u266a I want to piss on you \u266a Piss on you, I'll piss on you \u266a I'll pee on you \u266a Said your body \u266a Your body \u266a Is a porta potty \u266a And I pee, I can't Sit your body, your body It's a portal party And my P.I. kit Like a post-corona body I'm gonna pee on you Drip, drip, drip, pee on you Pour on you It's on you, it's on you You make me feel quite the same \u266a It's on you, it's on you, it's on you, it's on you \u266a \u266a You make me feel quite the same \u266a \u266a When you get a whiff of my Hershey stains \u266a \u266a I wanna poop on you, too \u266a \u266a I want to pee in your food \u266a \u266a Only thing to make my life complete is when I turn your face into a toilet seat. I want to pee on you. Yes, I do. Yes, I do. I'll pee on you. I'll piss on you. Haters want to hate. Lovers want to love. I don't even want none of the above. I want to piss on you Yes I do, I'll piss on you, I'll pee on you Won't you braid my hair? Say, won't you braid my hair? Before you start, I'm gonna fart I'm gonna fart on you I'm gonna fall I'm gonna fall on you", "chunks": [{"timestamp": [0.0, 2.0], "text": " Oh"}, {"timestamp": [8.64, 15.32], "text": " See roll around sitting on doves can I was how truck cooling in my escalate man?"}, {"timestamp": [15.32, 20.76], "text": " I'm paid I got it made take me to your special place close your eyes show me your face"}, {"timestamp": [21.88, 25.8], "text": " I'm gonna piss on it \u266a When you wanna be \u266a I was born to love"}, {"timestamp": [25.8, 27.8], "text": " \u266a I don't even want it"}, {"timestamp": [27.8, 28.8], "text": " \u266a One of the above"}, {"timestamp": [28.8, 31.8], "text": " \u266a I want to piss on you"}, {"timestamp": [31.8, 33.8], "text": " \u266a Piss on you, I'll piss on you"}, {"timestamp": [33.8, 36.8], "text": " \u266a I'll pee on you"}, {"timestamp": [36.8, 38.8], "text": " \u266a Said your body"}, {"timestamp": [38.8, 40.8], "text": " \u266a Your body"}, {"timestamp": [40.8, 44.8], "text": " \u266a Is a porta potty"}, {"timestamp": [44.8, 48.0], "text": " \u266a And I pee, I can't Sit your body, your body It's a portal party"}, {"timestamp": [48.0, 52.0], "text": " And my P.I. 
kit"}, {"timestamp": [52.0, 56.0], "text": " Like a post-corona body"}, {"timestamp": [56.0, 58.0], "text": " I'm gonna pee on you"}, {"timestamp": [58.0, 60.0], "text": " Drip, drip, drip, pee on you"}, {"timestamp": [60.0, 62.0], "text": " Pour on you"}, {"timestamp": [62.0, 64.0], "text": " It's on you, it's on you"}, {"timestamp": [64.0, 65.0], "text": " You make me feel quite the same \u266a It's on you, it's on you, it's on you, it's on you \u266a"}, {"timestamp": [65.0, 68.38], "text": " \u266a You make me feel quite the same \u266a"}, {"timestamp": [68.38, 72.38], "text": " \u266a When you get a whiff of my Hershey stains \u266a"}, {"timestamp": [72.38, 76.02], "text": " \u266a I wanna poop on you, too \u266a"}, {"timestamp": [76.02, 80.68], "text": " \u266a I want to pee in your food \u266a"}, {"timestamp": [80.68, 85.24], "text": " \u266a Only thing to make my life complete is when I turn your face into a toilet seat."}, {"timestamp": [85.24, 87.92], "text": " I want to pee on you."}, {"timestamp": [87.92, 89.84], "text": " Yes, I do."}, {"timestamp": [89.84, 90.84], "text": " Yes, I do."}, {"timestamp": [90.84, 91.84], "text": " I'll pee on you."}, {"timestamp": [91.84, 92.84], "text": " I'll piss on you."}, {"timestamp": [95.8, 97.72], "text": " Haters want to hate."}, {"timestamp": [97.72, 99.72], "text": " Lovers want to love."}, {"timestamp": [99.72, 103.04], "text": " I don't even want none of the above."}, {"timestamp": [103.04, 105.0], "text": " I want to piss on you"}, {"timestamp": [105.0, 109.0], "text": " Yes I do, I'll piss on you, I'll pee on you"}, {"timestamp": [110.0, 114.0], "text": " Won't you braid my hair?"}, {"timestamp": [114.0, 118.0], "text": " Say, won't you braid my hair?"}, {"timestamp": [119.0, 122.0], "text": " Before you start, I'm gonna fart"}, {"timestamp": [123.0, 125.0], "text": " I'm gonna fart on you"}, {"timestamp": [122.47, 124.47], "text": " I'm gonna fall"}, {"timestamp": [124.47, 126.47], "text": " I'm gonna fall on you"}]} -------------------------------------------------------------------------------- /transcripts/whisper_chunks/pkzHHaAJRqA_large.txt: -------------------------------------------------------------------------------- 1 | {"text": " We're going to South Carolina and Oklahoma and Arizona and North Dakota and New Mexico! We're going to California and Texas and New York! And we're going to South Dakota and Oregon and Washington and Michigan! And then we're going to Washington, D.C. to take back the White House! Yeah! Never gets old. If you're wondering why we just came back from break with that viral clip of Howard Dean from 2004, it's because the former Vermont governor reprised the infamous moment last night at the convention. Take a listen. This race is going to be won on the ground, and it's going to be won in Colorado and in Iowa and North Carolina and Michigan and Florida and Pennsylvania and then we're going to the White House. So clearly he's gotten over it and also he looks way younger now than he did back in 2004. I don't know what's going on with the hair there, Governor. The crowd went wild for Governor Dean. The original Dean scream was so popular, it even prompted a skit on the revered sketch comedy show, Chappelle's Show. Too bad we can't show you that because it is absolutely awesome, but it didn't propel Dean to success. 
He came in third at the Democratic Iowa caucuses.", "chunks": [{"timestamp": [0.0, 6.0], "text": " We're going to South Carolina and Oklahoma and Arizona and North Dakota and New Mexico!"}, {"timestamp": [6.0, 9.0], "text": " We're going to California and Texas and New York!"}, {"timestamp": [9.0, 14.0], "text": " And we're going to South Dakota and Oregon and Washington and Michigan!"}, {"timestamp": [14.0, 17.0], "text": " And then we're going to Washington, D.C. to take back the White House!"}, {"timestamp": [17.0, 19.0], "text": " Yeah!"}, {"timestamp": [20.0, 22.0], "text": " Never gets old."}, {"timestamp": [22.0, 27.76], "text": " If you're wondering why we just came back from break with that viral clip of Howard Dean from 2004,"}, {"timestamp": [27.76, 34.44], "text": " it's because the former Vermont governor reprised the infamous moment last night at the convention. Take a listen."}, {"timestamp": [34.84, 40.6], "text": " This race is going to be won on the ground, and it's going to be won in"}, {"timestamp": [40.96, 47.0], "text": " Colorado and in Iowa and North Carolina and Michigan and Florida and Pennsylvania"}, {"timestamp": [47.0, 52.32], "text": " and then we're going to the White House."}, {"timestamp": [52.32, 56.96], "text": " So clearly he's gotten over it and also he looks way younger now than he did back in"}, {"timestamp": [56.96, 57.96], "text": " 2004."}, {"timestamp": [57.96, 59.44], "text": " I don't know what's going on with the hair there, Governor."}, {"timestamp": [59.44, 62.12], "text": " The crowd went wild for Governor Dean."}, {"timestamp": [62.12, 65.44], "text": " The original Dean scream was so popular,"}, {"timestamp": [65.44, 68.0], "text": " it even prompted a skit on the revered"}, {"timestamp": [68.0, 70.58], "text": " sketch comedy show, Chappelle's Show."}, {"timestamp": [70.58, 71.98], "text": " Too bad we can't show you that"}, {"timestamp": [71.98, 73.34], "text": " because it is absolutely awesome,"}, {"timestamp": [73.34, 75.42], "text": " but it didn't propel Dean to success."}, {"timestamp": [75.42, 79.12], "text": " He came in third at the Democratic Iowa caucuses."}]} --------------------------------------------------------------------------------