├── CODEOWNERS ├── authors ├── chetan-thote.toml ├── bharath-swamy.toml ├── singlestore.toml └── madhukar-kumar.toml ├── common └── images │ ├── caution.png │ ├── banner-colors.png │ ├── header-icons │ ├── map.png │ ├── binary.png │ ├── bolt.png │ ├── clouds.png │ ├── files.png │ ├── filter.png │ ├── gears.png │ ├── globe.png │ ├── image.png │ ├── laptop.png │ ├── link.png │ ├── lock.png │ ├── notes.png │ ├── radar.png │ ├── rocket.png │ ├── server.png │ ├── shield.png │ ├── shop.png │ ├── star.png │ ├── users.png │ ├── browser.png │ ├── database.png │ ├── id-card.png │ ├── pipeline.png │ ├── waveform.png │ ├── arrows-spin.png │ ├── bolt-no-bg.png │ ├── file-export.png │ ├── files-no-bg.png │ ├── gears-no-bg.png │ ├── globe-no-bg.png │ ├── handshake.png │ ├── image-no-bg.png │ ├── link-no-bg.png │ ├── lock-no-bg.png │ ├── map-no-bg.png │ ├── megaphone.png │ ├── notes-no-bg.png │ ├── radar-no-bg.png │ ├── shop-no-bg.png │ ├── star-no-bg.png │ ├── user-plus.png │ ├── users-no-bg.png │ ├── binary-no-bg.png │ ├── browser-no-bg.png │ ├── camera-movie.png │ ├── chart-network.png │ ├── chart-scatter.png │ ├── clouds-no-bg.png │ ├── crystal-ball.png │ ├── dollar-circle.png │ ├── filter-no-bg.png │ ├── id-card-no-bg.png │ ├── laptop-no-bg.png │ ├── lightbulb-on.png │ ├── location-dots.png │ ├── message-dots.png │ ├── nodes-circle.png │ ├── rocket-no-bg.png │ ├── server-no-bg.png │ ├── shield-no-bg.png │ ├── shopping-bag.png │ ├── shopping-cart.png │ ├── vector-circle.png │ ├── block-question.png │ ├── book-open-cover.png │ ├── calendar-check.png │ ├── confluent-logo.png │ ├── database-no-bg.png │ ├── face-viewfinder.png │ ├── handshake-no-bg.png │ ├── megaphone-no-bg.png │ ├── pipeline-no-bg.png │ ├── user-plus-no-bg.png │ ├── waveform-no-bg.png │ ├── arrows-spin-no-bg.png │ ├── camera-movie-no-bg.png │ ├── crystal-ball-no-bg.png │ ├── file-export-no-bg.png │ ├── lightbulb-on-no-bg.png │ ├── memo-circle-check.png │ ├── message-dots-no-bg.png │ ├── nodes-circle-no-bg.png │ 
├── screwdriver-wrench.png │ ├── shopping-bag-no-bg.png │ ├── arrow-up-right-dots.png │ ├── block-question-no-bg.png │ ├── book-open-cover-no-bg.png │ ├── calendar-check-no-bg.png │ ├── chart-network-no-bg.png │ ├── chart-scatter-no-bg.png │ ├── confluent-logo-no-bg.png │ ├── dollar-circle-no-bg.png │ ├── face-viewfinder-no-bg.png │ ├── location-dots-no-bg.png │ ├── shopping-cart-no-bg.png │ ├── vector-circle-no-bg.png │ ├── memo-circle-check-no-bg.png │ ├── arrow-up-right-dots-no-bg.png │ └── screwdriver-wrench-no-bg.png │ ├── singlestore-logo.png │ ├── singlestore-banner.png │ ├── singlestore-jupyter.png │ ├── banner-colors-reverse.png │ ├── card-header-icons │ ├── ai.png │ ├── kai.png │ ├── ingest.png │ ├── notebook.png │ ├── python-sdk.png │ ├── real-time.png │ ├── integrations.png │ ├── data-management.png │ └── query-performance.png │ ├── singlestore-logo-100.png │ ├── singlestore-logo-grey.png │ ├── preview-header-icons │ ├── ai.png │ ├── kai.png │ ├── ingest.png │ ├── notebook.png │ ├── python-sdk.png │ ├── real-time.png │ ├── integrations.png │ ├── data-management.png │ └── query-performance.png │ ├── author-images │ └── singlestore.png │ ├── singlestore-full-logo-grey.png │ ├── singlestore-logo-100-grey.png │ ├── singlestore-logo-vertical.png │ ├── singlestore-full-logo-vertical.png │ ├── singlestore-full-logo-grey.svg │ └── singlestore-logo-grey.svg ├── notebooks ├── insure-gpt-demo │ ├── assets │ │ ├── car-accident.jpg │ │ ├── insurance_claim.pdf │ │ ├── insurance_claim.png │ │ ├── car_repair_estimate.png │ │ ├── car_repair_estimate.csv │ │ └── insurance_claim.txt │ └── meta.toml ├── atlas-and-kai │ ├── images │ │ ├── in-app-analytics.png │ │ ├── mongo-db-singlestore.png │ │ ├── mongo-db-singlestoredb.png │ │ ├── mongo-enabled-workspace.png │ │ └── select-workspace-and-database.png │ └── meta.toml ├── notebook-basics │ ├── images │ │ ├── edit-firewall.png │ │ ├── new-endpoints.png │ │ ├── select-workspace-and-database.png │ │ └── 
connect-to-an-external-datasource.png │ └── meta.toml ├── resume-evaluator │ ├── images │ │ ├── resume_matcher.jpg │ │ └── resume_matcher_flow_diagram.jpg │ └── meta.toml ├── movie-recommendation │ ├── images │ │ └── database-tables.png │ └── meta.toml ├── cdc-mongodb-sql-commands │ ├── images │ │ └── cdc-in-header.png │ └── meta.toml ├── image-matching-with-sql │ ├── images │ │ └── emma-thompson.png │ └── meta.toml ├── inserting-embeddings-from-multiple-models-into-singlestore-using-external-functions │ ├── requirements.txt │ ├── images │ │ └── architecture.png │ ├── meta.toml │ └── external_function_api.py ├── unified-data-analysis-sql-nosql-kai │ ├── selectdb.png │ ├── banking_analytics.png │ └── meta.toml ├── rag-with-bedrock │ ├── assets │ │ ├── RAG_pipeline_S2_Bedrock.png │ │ └── RAG_runtime_S2_Bedrock.png │ └── meta.toml ├── real-time-anomaly-detection │ ├── images │ │ └── architecture.png │ └── meta.toml ├── confluent-cloud-integration │ ├── images │ │ ├── confluent-api-key.png │ │ ├── kafka-value-schema.png │ │ └── confluent-kafka-integration.png │ └── meta.toml ├── resume-evaluator-ANN-index-search │ ├── images │ │ ├── resume_matcher.jpg │ │ └── resume_matcher_flow_diagram.jpg │ └── meta.toml ├── getting-started-with-mongocdc │ ├── images │ │ └── mongodb-singlestorecloud.gif │ └── meta.toml ├── notebook-style-guide │ └── meta.toml ├── singlestore-cheat-sheet │ └── meta.toml ├── semantic-search-with-hugging-face │ ├── README.md │ └── meta.toml ├── vector-database-basics │ └── meta.toml ├── similarity-search-on-vector-data │ └── meta.toml ├── backup-database-s3 │ └── meta.toml ├── migrate-virtual-workspace │ └── meta.toml ├── python-image-udf │ └── meta.toml ├── python-udf-template │ ├── meta.toml │ └── notebook.ipynb ├── restore-database-from-s3 │ └── meta.toml ├── create-dash-app │ └── meta.toml ├── semantic-search-with-openai-qa │ └── meta.toml ├── getting-started-with-ai-functions │ └── meta.toml ├── load-json-files-s3 │ └── meta.toml ├── hybrid-search 
│ └── meta.toml ├── ml-function-classify │ └── meta.toml ├── getting-started-with-singlestore │ └── meta.toml ├── launch-open-source-apps-with-langchain │ ├── meta.toml │ └── README.md ├── basic-query-examples │ └── meta.toml ├── kebab-case │ ├── meta.toml │ └── notebook.ipynb ├── performance-troubleshooting │ ├── meta.toml │ └── assets │ │ ├── DB_PERFORMANCE_TROUBLESHOOT_QUERIES.csv │ │ └── templates │ │ ├── index.template.html │ │ ├── Result-1.template.html │ │ ├── Result-5.template.html │ │ └── Result-3.template.html ├── getting-started-with-notebooks │ └── meta.toml ├── managing-stage-files-with-fusion-sql │ └── meta.toml ├── vector-search-with-kai │ └── meta.toml ├── load-kafka-template │ └── meta.toml ├── load-s3-files-into-shared-tier │ └── meta.toml ├── semantic-search-with-openai-embedding-creation │ └── meta.toml ├── working-with-vector-data │ └── meta.toml ├── ml-function-anomaly-detect │ └── meta.toml ├── representing-unstructured-data-as-vectors │ ├── meta.toml │ └── notebook.ipynb ├── getting-started-with-dataframes │ └── meta.toml ├── how-to-build-llm-apps-that-can-see-hear-speak │ └── meta.toml ├── integrating-with-pandas │ ├── meta.toml │ └── data │ │ └── iris.csv ├── load-csv-data-s3 │ └── meta.toml ├── load-csv-data-s3-placeholder │ └── meta.toml ├── load-data-json │ └── meta.toml ├── network-intrusion-detection-part-2 │ └── meta.toml ├── building-a-multi-agent-ai-app-with-autogen │ └── meta.toml ├── cloud-functions-template │ └── meta.toml ├── hybrid-full-text-vector-search │ └── meta.toml ├── pipelines-query-tuning │ └── meta.toml ├── load-data-kakfa │ └── meta.toml ├── evaluating-llms-with-uptrain │ └── meta.toml ├── running-notebooks-from-another-notebook-with-fusion-sql │ └── meta.toml ├── searching-all-of-wikipedia │ └── meta.toml ├── getting-started-with-fusion-sql │ └── meta.toml ├── optimize-performance-with-tpch-100 │ └── meta.toml ├── network-intrusion-detection-part-3 │ └── meta.toml ├── ingest-pdfs-with-pdfplumber │ └── meta.toml ├── 
singlestore-april-challenge-haiku-ascii │ └── meta.toml ├── real-time-recommendation-engine │ ├── meta.toml │ └── singlestore_bundle.pem ├── semantic-visualization-and-vector-datatype │ └── meta.toml └── network-intrusion-detection-part-1 │ └── meta.toml ├── README.md ├── .gitattributes ├── setup.cfg ├── lessons └── it-threat-detection.toml ├── .github └── workflows │ ├── deploy-bifrost.yml │ ├── pre-commit.yml │ └── release-samples.yml ├── meta.toml ├── resources ├── lesson-check.py ├── author-check.py ├── nb-meta-check.py └── package-samples.py ├── .gitignore ├── .pre-commit-config.yaml ├── CONTRIBUTING.md └── CODE_OF_CONDUCT.md /CODEOWNERS: -------------------------------------------------------------------------------- 1 | * @kesmit13 2 | * @tiimgreen 3 | * @mivasconcelos 4 | * @ricardoasmarques 5 | -------------------------------------------------------------------------------- /authors/chetan-thote.toml: -------------------------------------------------------------------------------- 1 | name="Chetan Thote" 2 | title="Product Team" 3 | image="singlestore" 4 | external=false 5 | -------------------------------------------------------------------------------- /authors/bharath-swamy.toml: -------------------------------------------------------------------------------- 1 | name="Bharath Swamy" 2 | title="Product Team" 3 | image="singlestore" 4 | external=false 5 | -------------------------------------------------------------------------------- /authors/singlestore.toml: -------------------------------------------------------------------------------- 1 | name="SingleStore" 2 | title="Engineering Team" 3 | image="singlestore" 4 | external=false 5 | -------------------------------------------------------------------------------- /common/images/caution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/caution.png 
-------------------------------------------------------------------------------- /common/images/banner-colors.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/banner-colors.png -------------------------------------------------------------------------------- /common/images/header-icons/map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/map.png -------------------------------------------------------------------------------- /common/images/singlestore-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-logo.png -------------------------------------------------------------------------------- /common/images/header-icons/binary.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/binary.png -------------------------------------------------------------------------------- /common/images/header-icons/bolt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/bolt.png -------------------------------------------------------------------------------- /common/images/header-icons/clouds.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/clouds.png -------------------------------------------------------------------------------- /common/images/header-icons/files.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/files.png -------------------------------------------------------------------------------- /common/images/header-icons/filter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/filter.png -------------------------------------------------------------------------------- /common/images/header-icons/gears.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/gears.png -------------------------------------------------------------------------------- /common/images/header-icons/globe.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/globe.png -------------------------------------------------------------------------------- /common/images/header-icons/image.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/image.png -------------------------------------------------------------------------------- /common/images/header-icons/laptop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/laptop.png -------------------------------------------------------------------------------- /common/images/header-icons/link.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/link.png -------------------------------------------------------------------------------- /common/images/header-icons/lock.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/lock.png -------------------------------------------------------------------------------- /common/images/header-icons/notes.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/notes.png -------------------------------------------------------------------------------- /common/images/header-icons/radar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/radar.png -------------------------------------------------------------------------------- /common/images/header-icons/rocket.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/rocket.png -------------------------------------------------------------------------------- /common/images/header-icons/server.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/server.png -------------------------------------------------------------------------------- /common/images/header-icons/shield.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shield.png 
-------------------------------------------------------------------------------- /common/images/header-icons/shop.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shop.png -------------------------------------------------------------------------------- /common/images/header-icons/star.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/star.png -------------------------------------------------------------------------------- /common/images/header-icons/users.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/users.png -------------------------------------------------------------------------------- /common/images/singlestore-banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-banner.png -------------------------------------------------------------------------------- /common/images/singlestore-jupyter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-jupyter.png -------------------------------------------------------------------------------- /common/images/banner-colors-reverse.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/banner-colors-reverse.png -------------------------------------------------------------------------------- /common/images/card-header-icons/ai.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/ai.png -------------------------------------------------------------------------------- /common/images/card-header-icons/kai.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/kai.png -------------------------------------------------------------------------------- /common/images/header-icons/browser.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/browser.png -------------------------------------------------------------------------------- /common/images/header-icons/database.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/database.png -------------------------------------------------------------------------------- /common/images/header-icons/id-card.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/id-card.png -------------------------------------------------------------------------------- /common/images/header-icons/pipeline.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/pipeline.png -------------------------------------------------------------------------------- /common/images/header-icons/waveform.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/waveform.png -------------------------------------------------------------------------------- /common/images/singlestore-logo-100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-logo-100.png -------------------------------------------------------------------------------- /common/images/singlestore-logo-grey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-logo-grey.png -------------------------------------------------------------------------------- /common/images/card-header-icons/ingest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/ingest.png -------------------------------------------------------------------------------- /common/images/header-icons/arrows-spin.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/arrows-spin.png -------------------------------------------------------------------------------- /common/images/header-icons/bolt-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/bolt-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/file-export.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/file-export.png -------------------------------------------------------------------------------- /common/images/header-icons/files-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/files-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/gears-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/gears-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/globe-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/globe-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/handshake.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/handshake.png -------------------------------------------------------------------------------- /common/images/header-icons/image-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/image-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/link-no-bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/link-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/lock-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/lock-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/map-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/map-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/megaphone.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/megaphone.png -------------------------------------------------------------------------------- /common/images/header-icons/notes-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/notes-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/radar-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/radar-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/shop-no-bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shop-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/star-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/star-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/user-plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/user-plus.png -------------------------------------------------------------------------------- /common/images/header-icons/users-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/users-no-bg.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/ai.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/ai.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/kai.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/kai.png -------------------------------------------------------------------------------- /common/images/author-images/singlestore.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/author-images/singlestore.png -------------------------------------------------------------------------------- /common/images/card-header-icons/notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/notebook.png -------------------------------------------------------------------------------- /common/images/header-icons/binary-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/binary-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/browser-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/browser-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/camera-movie.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/camera-movie.png -------------------------------------------------------------------------------- /common/images/header-icons/chart-network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/chart-network.png -------------------------------------------------------------------------------- /common/images/header-icons/chart-scatter.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/chart-scatter.png -------------------------------------------------------------------------------- /common/images/header-icons/clouds-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/clouds-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/crystal-ball.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/crystal-ball.png -------------------------------------------------------------------------------- /common/images/header-icons/dollar-circle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/dollar-circle.png -------------------------------------------------------------------------------- /common/images/header-icons/filter-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/filter-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/id-card-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/id-card-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/laptop-no-bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/laptop-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/lightbulb-on.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/lightbulb-on.png -------------------------------------------------------------------------------- /common/images/header-icons/location-dots.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/location-dots.png -------------------------------------------------------------------------------- /common/images/header-icons/message-dots.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/message-dots.png -------------------------------------------------------------------------------- /common/images/header-icons/nodes-circle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/nodes-circle.png -------------------------------------------------------------------------------- /common/images/header-icons/rocket-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/rocket-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/server-no-bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/server-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/shield-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shield-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/shopping-bag.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shopping-bag.png -------------------------------------------------------------------------------- /common/images/header-icons/shopping-cart.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shopping-cart.png -------------------------------------------------------------------------------- /common/images/header-icons/vector-circle.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/vector-circle.png -------------------------------------------------------------------------------- /common/images/singlestore-full-logo-grey.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-full-logo-grey.png -------------------------------------------------------------------------------- /common/images/singlestore-logo-100-grey.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-logo-100-grey.png -------------------------------------------------------------------------------- /common/images/singlestore-logo-vertical.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-logo-vertical.png -------------------------------------------------------------------------------- /common/images/card-header-icons/python-sdk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/python-sdk.png -------------------------------------------------------------------------------- /common/images/card-header-icons/real-time.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/real-time.png -------------------------------------------------------------------------------- /common/images/header-icons/block-question.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/block-question.png -------------------------------------------------------------------------------- /common/images/header-icons/book-open-cover.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/book-open-cover.png -------------------------------------------------------------------------------- /common/images/header-icons/calendar-check.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/calendar-check.png -------------------------------------------------------------------------------- /common/images/header-icons/confluent-logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/confluent-logo.png -------------------------------------------------------------------------------- /common/images/header-icons/database-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/database-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/face-viewfinder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/face-viewfinder.png -------------------------------------------------------------------------------- /common/images/header-icons/handshake-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/handshake-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/megaphone-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/megaphone-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/pipeline-no-bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/pipeline-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/user-plus-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/user-plus-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/waveform-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/waveform-no-bg.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/ingest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/ingest.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/notebook.png -------------------------------------------------------------------------------- /common/images/card-header-icons/integrations.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/integrations.png -------------------------------------------------------------------------------- /common/images/header-icons/arrows-spin-no-bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/arrows-spin-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/camera-movie-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/camera-movie-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/crystal-ball-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/crystal-ball-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/file-export-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/file-export-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/lightbulb-on-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/lightbulb-on-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/memo-circle-check.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/memo-circle-check.png -------------------------------------------------------------------------------- /common/images/header-icons/message-dots-no-bg.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/message-dots-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/nodes-circle-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/nodes-circle-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/screwdriver-wrench.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/screwdriver-wrench.png -------------------------------------------------------------------------------- /common/images/header-icons/shopping-bag-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shopping-bag-no-bg.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/python-sdk.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/python-sdk.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/real-time.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/real-time.png -------------------------------------------------------------------------------- 
/common/images/singlestore-full-logo-vertical.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/singlestore-full-logo-vertical.png -------------------------------------------------------------------------------- /notebooks/insure-gpt-demo/assets/car-accident.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/insure-gpt-demo/assets/car-accident.jpg -------------------------------------------------------------------------------- /common/images/card-header-icons/data-management.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/data-management.png -------------------------------------------------------------------------------- /common/images/header-icons/arrow-up-right-dots.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/arrow-up-right-dots.png -------------------------------------------------------------------------------- /common/images/header-icons/block-question-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/block-question-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/book-open-cover-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/book-open-cover-no-bg.png 
-------------------------------------------------------------------------------- /common/images/header-icons/calendar-check-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/calendar-check-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/chart-network-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/chart-network-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/chart-scatter-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/chart-scatter-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/confluent-logo-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/confluent-logo-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/dollar-circle-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/dollar-circle-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/face-viewfinder-no-bg.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/face-viewfinder-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/location-dots-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/location-dots-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/shopping-cart-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/shopping-cart-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/vector-circle-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/vector-circle-no-bg.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/integrations.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/integrations.png -------------------------------------------------------------------------------- /notebooks/atlas-and-kai/images/in-app-analytics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/atlas-and-kai/images/in-app-analytics.png -------------------------------------------------------------------------------- /notebooks/insure-gpt-demo/assets/insurance_claim.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/insure-gpt-demo/assets/insurance_claim.pdf -------------------------------------------------------------------------------- /notebooks/insure-gpt-demo/assets/insurance_claim.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/insure-gpt-demo/assets/insurance_claim.png -------------------------------------------------------------------------------- /notebooks/notebook-basics/images/edit-firewall.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/notebook-basics/images/edit-firewall.png -------------------------------------------------------------------------------- /notebooks/notebook-basics/images/new-endpoints.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/notebook-basics/images/new-endpoints.png -------------------------------------------------------------------------------- /notebooks/resume-evaluator/images/resume_matcher.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/resume-evaluator/images/resume_matcher.jpg -------------------------------------------------------------------------------- /common/images/card-header-icons/query-performance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/card-header-icons/query-performance.png -------------------------------------------------------------------------------- 
/common/images/header-icons/memo-circle-check-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/memo-circle-check-no-bg.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/data-management.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/data-management.png -------------------------------------------------------------------------------- /common/images/header-icons/arrow-up-right-dots-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/arrow-up-right-dots-no-bg.png -------------------------------------------------------------------------------- /common/images/header-icons/screwdriver-wrench-no-bg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/header-icons/screwdriver-wrench-no-bg.png -------------------------------------------------------------------------------- /common/images/preview-header-icons/query-performance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/common/images/preview-header-icons/query-performance.png -------------------------------------------------------------------------------- /notebooks/atlas-and-kai/images/mongo-db-singlestore.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/atlas-and-kai/images/mongo-db-singlestore.png -------------------------------------------------------------------------------- /notebooks/atlas-and-kai/images/mongo-db-singlestoredb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/atlas-and-kai/images/mongo-db-singlestoredb.png -------------------------------------------------------------------------------- /notebooks/insure-gpt-demo/assets/car_repair_estimate.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/insure-gpt-demo/assets/car_repair_estimate.png -------------------------------------------------------------------------------- /notebooks/movie-recommendation/images/database-tables.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/movie-recommendation/images/database-tables.png -------------------------------------------------------------------------------- /notebooks/atlas-and-kai/images/mongo-enabled-workspace.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/atlas-and-kai/images/mongo-enabled-workspace.png -------------------------------------------------------------------------------- /notebooks/cdc-mongodb-sql-commands/images/cdc-in-header.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/cdc-mongodb-sql-commands/images/cdc-in-header.png -------------------------------------------------------------------------------- 
/notebooks/image-matching-with-sql/images/emma-thompson.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/image-matching-with-sql/images/emma-thompson.png -------------------------------------------------------------------------------- /notebooks/inserting-embeddings-from-multiple-models-into-singlestore-using-external-functions/requirements.txt: -------------------------------------------------------------------------------- 1 | flask==2.0.1 2 | openai==0.10.2 3 | torch==1.8.1 4 | transformers==4.5.1 5 | -------------------------------------------------------------------------------- /notebooks/unified-data-analysis-sql-nosql-kai/selectdb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/unified-data-analysis-sql-nosql-kai/selectdb.png -------------------------------------------------------------------------------- /notebooks/rag-with-bedrock/assets/RAG_pipeline_S2_Bedrock.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/rag-with-bedrock/assets/RAG_pipeline_S2_Bedrock.png -------------------------------------------------------------------------------- /notebooks/rag-with-bedrock/assets/RAG_runtime_S2_Bedrock.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/rag-with-bedrock/assets/RAG_runtime_S2_Bedrock.png -------------------------------------------------------------------------------- /notebooks/real-time-anomaly-detection/images/architecture.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/real-time-anomaly-detection/images/architecture.png -------------------------------------------------------------------------------- /notebooks/atlas-and-kai/images/select-workspace-and-database.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/atlas-and-kai/images/select-workspace-and-database.png -------------------------------------------------------------------------------- /notebooks/confluent-cloud-integration/images/confluent-api-key.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/confluent-cloud-integration/images/confluent-api-key.png -------------------------------------------------------------------------------- /notebooks/confluent-cloud-integration/images/kafka-value-schema.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/confluent-cloud-integration/images/kafka-value-schema.png -------------------------------------------------------------------------------- /notebooks/notebook-basics/images/select-workspace-and-database.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/notebook-basics/images/select-workspace-and-database.png -------------------------------------------------------------------------------- /notebooks/resume-evaluator/images/resume_matcher_flow_diagram.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/resume-evaluator/images/resume_matcher_flow_diagram.jpg 
-------------------------------------------------------------------------------- /notebooks/unified-data-analysis-sql-nosql-kai/banking_analytics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/unified-data-analysis-sql-nosql-kai/banking_analytics.png -------------------------------------------------------------------------------- /notebooks/resume-evaluator-ANN-index-search/images/resume_matcher.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/resume-evaluator-ANN-index-search/images/resume_matcher.jpg -------------------------------------------------------------------------------- /notebooks/notebook-basics/images/connect-to-an-external-datasource.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/notebook-basics/images/connect-to-an-external-datasource.png -------------------------------------------------------------------------------- /notebooks/confluent-cloud-integration/images/confluent-kafka-integration.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/confluent-cloud-integration/images/confluent-kafka-integration.png -------------------------------------------------------------------------------- /notebooks/getting-started-with-mongocdc/images/mongodb-singlestorecloud.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/getting-started-with-mongocdc/images/mongodb-singlestorecloud.gif -------------------------------------------------------------------------------- 
/notebooks/resume-evaluator-ANN-index-search/images/resume_matcher_flow_diagram.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/resume-evaluator-ANN-index-search/images/resume_matcher_flow_diagram.jpg -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SingleStore Spaces 2 | 3 | A collection of sample Jupyter notebooks for use with SingleStoreDB. 4 | -------------------------------------------------------------------------------- /authors/madhukar-kumar.toml: -------------------------------------------------------------------------------- 1 | name="Madhukar Kumar" 2 | title="CMO" 3 | image="https://images.contentstack.io/v3/assets/bltac01ee6daa3a1e14/blt900cdf519ae836e0/64186f8bcf9d2107aeec121a/Madhukar-Kumar.png?width=100&disable=upscale&auto=webp" 4 | external=false 5 | -------------------------------------------------------------------------------- /notebooks/insure-gpt-demo/assets/car_repair_estimate.csv: -------------------------------------------------------------------------------- 1 | Car Part,Estimated Cost (USD) 2 | Front Bumper,300 3 | Hood,450 4 | Front Grille,200 5 | Headlights,250 6 | Radiator,500 7 | Engine Repair,1500 8 | Front Fenders,350 9 | Windshield,300 10 | Total Estimate,3850 11 | -------------------------------------------------------------------------------- /notebooks/notebook-style-guide/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Notebook Style Guide" 4 | description="Become a pro at formatting and organizing your notebooks." 
5 | icon="link" 6 | tags=[] 7 | lesson_areas=[] 8 | destinations=[] 9 | minimum_tier="standard" 10 | -------------------------------------------------------------------------------- /notebooks/inserting-embeddings-from-multiple-models-into-singlestore-using-external-functions/images/architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/singlestore-labs/spaces-notebooks/HEAD/notebooks/inserting-embeddings-from-multiple-models-into-singlestore-using-external-functions/images/architecture.png -------------------------------------------------------------------------------- /notebooks/singlestore-cheat-sheet/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["madhukar-kumar"] 3 | title = "SingleStore Cheat Sheet" 4 | description = "Get started with SingleStore quickly with common commands" 5 | icon = "radar" 6 | difficulty="beginner" 7 | tags = ["starter"] 8 | lesson_areas=[] 9 | destinations = ["spaces"] 10 | minimum_tier="free-shared" 11 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | * text=auto 2 | # enforce text on certain files 3 | *.py text 4 | *.pyx text 5 | *.pyd text 6 | *.c text 7 | *.h text 8 | *.html text 9 | *.csv text 10 | *.json text 11 | *.pickle binary 12 | *.h5 binary 13 | *.dta binary 14 | *.xls binary 15 | *.xlsx binary 16 | *.wasm binary 17 | *.bat text eol=crlf 18 | *.html text 19 | *.witx text 20 | -------------------------------------------------------------------------------- /notebooks/semantic-search-with-hugging-face/README.md: -------------------------------------------------------------------------------- 1 | This notebook will perform an AI powered semantic search against a movie dataset in SingleStore. 2 | To follow along with this demo, download this notebook. 
3 | 1. Go to singlestore.com/trial and create an account. 4 | 2. Create a workspace – S00 is sufficient 5 | 3. Navigate to notebooks and upload this .ipynb file 6 | -------------------------------------------------------------------------------- /notebooks/insure-gpt-demo/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="InsureGPT: OpenAI and SingleStore for Insurance" 4 | description="Creating an insurance analysis with OpenAI and vectorizing the results into SingleStore." 5 | difficulty="intermediate" 6 | tags=["openai", "langchain"] 7 | lesson_areas=["AI"] 8 | icon="database" 9 | minimum_tier="standard" 10 | -------------------------------------------------------------------------------- /notebooks/vector-database-basics/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="A Deep Dive Into Vector Databases" 4 | description="Using SingleStoreDB as a vector database and vector database use cases." 5 | difficulty="beginner" 6 | tags=["vector", "embeddings"] 7 | lesson_areas=["AI"] 8 | icon="database" 9 | destinations=["spaces"] 10 | minimum_tier="standard" 11 | -------------------------------------------------------------------------------- /notebooks/atlas-and-kai/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Mongo Atlas & SingleStore Kai" 4 | description="""\ 5 | Run your MongoDB queries on SingleStore Kai with no code change. 
\ 6 | """ 7 | icon="database" 8 | difficulty="beginner" 9 | tags=["starter", "mongo", "kai"] 10 | lesson_areas=["Kai"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/similarity-search-on-vector-data/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Similarity Search on Vector Data" 4 | description="Example of similarity searches over vector data." 5 | difficulty="beginner" 6 | tags=["starter", "openai", "genai", "vectordb"] 7 | lesson_areas=["AI"] 8 | destinations=["spaces"] 9 | icon="vector-circle" 10 | minimum_tier="free-shared" 11 | -------------------------------------------------------------------------------- /notebooks/backup-database-s3/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title = "Backup Database to AWS S3" 4 | description = """\ 5 | Performs backup either single or multiple databases to AWS S3. \ 6 | """ 7 | icon = "database" 8 | difficulty="beginner" 9 | tags = ["starter", "backup"] 10 | lesson_areas=["Data Management"] 11 | destinations = ["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/migrate-virtual-workspace/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Migrate Your Virtual Workspace" 4 | description="Learn how to migrate Your VirtualWorkspace to a full-fledged Workspace in a dedicated Workspace Group." 
5 | icon="browser" 6 | difficulty="intermediate" 7 | tags=["starter", "notebooks"] 8 | lesson_areas=[] 9 | destinations=["spaces"] 10 | minimum_tier="standard" 11 | -------------------------------------------------------------------------------- /notebooks/notebook-basics/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="SingleStoreDB Notebook Basics" 4 | description="""\ 5 | Foundational guide to notebooks with essential tips and tricks. \ 6 | """ 7 | icon="notes" 8 | difficulty="beginner" 9 | tags=["starter", "python", "dataframes"] 10 | lesson_areas=["Python SDK"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/python-image-udf/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["bharath-swamy"] 3 | title="Python UDF to process images" 4 | description="""\ 5 | Learn how to integrate with Multimodal LLMs \ 6 | and run it from a Python UDF. 7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["advanced", "notebooks", "python"] 11 | lesson_areas=["AI"] 12 | destinations=["spaces"] 13 | minimum_tier="standard" 14 | -------------------------------------------------------------------------------- /notebooks/python-udf-template/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["bharath-swamy"] 3 | title="Run your first Python UDF" 4 | description="""\ 5 | Learn how to connect to create and\ 6 | publish a python UDF and call it in SQL. 
7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["starter", "notebooks", "python"] 11 | lesson_areas=[] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/restore-database-from-s3/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title = "Restore Database from AWS S3" 4 | description = """\ 5 | This notebook will help you perform database restore from AWS S3. 6 | """ 7 | icon = "database" 8 | difficulty="beginner" 9 | tags = ["starter", "admin", "restore"] 10 | lesson_areas=["Data Management"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = spaces_notebooks 3 | long_description = file: README.md 4 | long_description_content_type = text/markdown 5 | license = Apache-2.0 6 | license_file = LICENSE.txt 7 | classifiers = 8 | License :: OSI Approved :: Apache Software License 9 | 10 | [flake8] 11 | exclude = 12 | docs/* 13 | resources/* 14 | licenses/* 15 | max-complexity = 30 16 | max-line-length = 90 17 | -------------------------------------------------------------------------------- /notebooks/create-dash-app/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Publish your first SingleStore DashApp" 4 | description="""\ 5 | Learn how to connect to SingleStoreDB\ 6 | and publish an interactive Dashboard. 
7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["starter", "notebooks", "python"] 11 | lesson_areas=[] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/getting-started-with-mongocdc/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Getting Started With CDC Replication from MongoDB" 4 | description="""\ 5 | Setup Zero ETL data replication from MongoDB to SingleStore \ 6 | """ 7 | icon="pipeline" 8 | difficulty="beginner" 9 | tags=["cdc", "mongo", "kai"] 10 | lesson_areas=["Data Management"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/semantic-search-with-openai-qa/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Semantic Search with OpenAI QA" 4 | description="""\ 5 | Provide context to chatGPT using data stored in SingleStoreDB. \ 6 | """ 7 | icon="crystal-ball" 8 | difficulty="beginner" 9 | tags=["starter", "openai", "vectordb", "genai"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/getting-started-with-ai-functions/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["bharath-swamy"] 3 | title="Demonstrate some common AI function usecases" 4 | description="""\ 5 | Learn how to connect to use \ 6 | pre configured AI Functions. 
7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["advanced", "notebooks", "python"] 11 | lesson_areas=["AI"] 12 | destinations=["spaces"] 13 | minimum_tier="standard" 14 | -------------------------------------------------------------------------------- /notebooks/load-json-files-s3/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Load JSON files with Pipeline from S3" 4 | description="""\ 5 | This notebook will help you load JSON files from a public open AWS S3 bucket. 6 | """ 7 | icon="chart-network" 8 | difficulty="beginner" 9 | tags=["advanced", "pipeline", "json", "s3"] 10 | lesson_areas=["Ingest"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/hybrid-search/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Hybrid Search" 4 | description="""\ 5 | Hybrid search combines keyword search with semantic search, 6 | aiming to provide more accurate results. 7 | """ 8 | difficulty="beginner" 9 | tags=["starter", "openai", "genai", "vectordb"] 10 | lesson_areas=["AI"] 11 | icon="vector-circle" 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/movie-recommendation/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Movie Recommendation" 4 | description="""\ 5 | Movie recommendation engine using vectors stored in SingleStore to find your next watch. 
\ 6 | """ 7 | icon="camera-movie" 8 | difficulty="intermediate" 9 | tags=["advanced", "vectordb", "genai", "openai"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/image-matching-with-sql/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Image Matching with SQL" 4 | description="""\ 5 | Facial recognition using dot_product function on vectors stored in SingleStoreDB. \ 6 | """ 7 | icon="face-viewfinder" 8 | difficulty="intermediate" 9 | tags=["starter", "vectordb", "genai", "facenet"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/ml-function-classify/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["bharath-swamy"] 3 | title="Demonstrate ML function Classify" 4 | description="""\ 5 | Learn how to train an ML Classify \ 6 | model and run it to predict the class of an input row. 7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["advanced", "notebooks", "python"] 11 | lesson_areas=["AI"] 12 | destinations=["spaces"] 13 | minimum_tier="standard" 14 | -------------------------------------------------------------------------------- /notebooks/resume-evaluator/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Resume Evaluator" 4 | description="""\ 5 | Resume Evaluator using vectors stored in SingleStore to find suitable resumes for your job description. 
\ 6 | """ 7 | icon="arrows-spin" 8 | difficulty="intermediate" 9 | tags=["starter", "vectordb", "genai", "openai"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/getting-started-with-singlestore/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Getting Started with SingleStore" 4 | description="Test Drive SingleStore Multi-Model Examples in One Notebook" 5 | icon="database" 6 | difficulty="beginner" 7 | lesson_areas=["Data Management"] 8 | tags=["starter", "sql", "vectors", "fulltext", "json", "geospatial", "timeseries"] 9 | destinations=["spaces"] 10 | minimum_tier="free-shared" 11 | -------------------------------------------------------------------------------- /notebooks/launch-open-source-apps-with-langchain/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Launch Open-Source Apps with LangChain" 4 | description="""\ 5 | LangChain connector to use SingleStoreDB as your vector database for your apps. \ 6 | """ 7 | icon="vector-circle" 8 | difficulty="beginner" 9 | tags=["vectordb", "genai", "langchain"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/basic-query-examples/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Basic Query Examples" 4 | description="""\ 5 | Examples of basic SQL queries covering various database operations include index scans, full-table scans, joins, and aggregations. 
\ 6 | """ 7 | icon="notes" 8 | difficulty="beginner" 9 | tags=["starter", "sql"] 10 | lesson_areas=["Query Performance"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/kebab-case/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Automatically warm up your disk after resuming workspace" 4 | description="""\ 5 | Runs through the queries to bring data residing in object storage onto disk for a specified database. \ 6 | """ 7 | icon="database" 8 | difficulty="advanced" 9 | tags=["advanced", "automation", "tuning"] 10 | lesson_areas=[] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/performance-troubleshooting/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Database Performance Troubleshoot Notebook" 4 | description="""\ 5 | This notebook will help you perform database performance troubleshoot 6 | """ 7 | icon="database" 8 | difficulty="advanced" 9 | tags=["troubleshoot", "performance", "tuning", "starter"] 10 | lesson_areas=["Query Performance"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/getting-started-with-notebooks/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Getting Started with Notebooks" 4 | description="""\ 5 | SingleStore Notebooks with an AI Copilot (SQrL) for data prototyping, \ 6 | visualization and analysis. 
\ 7 | """ 8 | icon="notes" 9 | difficulty="beginner" 10 | tags=["starter", "python", "sqrl"] 11 | lesson_areas=["Python SDK"] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/managing-stage-files-with-fusion-sql/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Managing Stage files with Fusion SQL" 4 | description="""\ 5 | Learn how to manage your Stage files in SingleStoreDB 6 | Cloud using Fusion SQL. 7 | """ 8 | icon="files" 9 | difficulty="intermediate" 10 | tags=["starter", "fusion", "python"] 11 | lesson_areas=["Python SDK", "Ingest"] 12 | destinations=["spaces"] 13 | minimum_tier="standard" 14 | -------------------------------------------------------------------------------- /notebooks/vector-search-with-kai/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title = "Vector Search with Kai" 4 | description = """ 5 | Run Vector Search using MongoDB clients and power GenAI usecases for your MongoDB applications """ 6 | icon = "radar" 7 | difficulty="intermediate" 8 | tags = ["mongo", "embeddings", "vector", "genai", "kai", "starter"] 9 | lesson_areas=["Kai", "AI"] 10 | destinations = ["spaces"] 11 | minimum_tier="standard" 12 | -------------------------------------------------------------------------------- /notebooks/load-kafka-template/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["chetan-thote"] 3 | title="Importing Data from Kafka into SingleStore using Pipelines" 4 | description="""\ 5 | This notebook demonstrates how to create a sample table in SingleStore, set up a pipeline to import data from Kafka topic.""" 6 | difficulty="beginner" 7 | tags=["loaddata", "kafka"] 8 | lesson_areas=["Ingest"] 9 | 
icon="database" 10 | destinations=["spaces"] 11 | minimum_tier="standard" 12 | -------------------------------------------------------------------------------- /notebooks/load-s3-files-into-shared-tier/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Load files from S3 into Shared Tier" 4 | description="""\ 5 | This notebook guides you through data ingestion of CSV files from an AWS S3 location into your shared tier workspace. \ 6 | """ 7 | icon="chart-network" 8 | difficulty="beginner" 9 | tags=["pipeline", "s3"] 10 | lesson_areas=["Ingest"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/semantic-search-with-openai-embedding-creation/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Semantic Search with OpenAI Embedding Creation" 4 | description="""\ 5 | Generate embeddings and run semantic search in your database in SQL. \ 6 | """ 7 | icon="chart-network" 8 | difficulty="beginner" 9 | tags=["starter", "openai", "vectordb", "genai"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/working-with-vector-data/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Working with Vector Data" 4 | description="""\ 5 | This example demonstrates the various ways to upload and download 6 | vector data from SingleStoreDB using Python. 
7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["starter", "vectodb", "genai"] 11 | lesson_areas=["AI", "Python SDK"] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/cdc-mongodb-sql-commands/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title = "Ingest data from MONGODB® to SingleStore using SQL commands" 4 | description = """ 5 | Setup CDC Replication from MongoDB® to SingleStore using SQL commands. """ 6 | icon = "database" 7 | difficulty="intermediate" 8 | tags = ["mongo", "pipeline", "cdc", "ingest", "kai"] 9 | lesson_areas=["Data Management"] 10 | destinations = ["spaces"] 11 | minimum_tier="standard" 12 | -------------------------------------------------------------------------------- /notebooks/ml-function-anomaly-detect/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["bharath-swamy"] 3 | title="Demonstrate ML function Anomaly Detect" 4 | description="""\ 5 | Learn how to train an ML Anomaly Detect \ 6 | model and run it to predict the class of a set of time series inputs. 
7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["advanced", "notebooks", "python"] 11 | lesson_areas=["AI"] 12 | destinations=["spaces"] 13 | minimum_tier="standard" 14 | -------------------------------------------------------------------------------- /notebooks/unified-data-analysis-sql-nosql-kai/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Unified Data Analysis: SQL & NoSQL on a Single Database with Kai" 4 | description="""\ 5 | Perform both SQL and NoSQL queries on multi-modal relational and JSON data \ 6 | """ 7 | icon="database" 8 | difficulty="intermediate" 9 | tags=["cdc", "mongo", "sql", "nosql", "kai"] 10 | lesson_areas=["Kai"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/representing-unstructured-data-as-vectors/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Representing Unstructured Data as Vectors" 4 | description="Transforming unstructured data into vectors and representing them in a 3-dimensional space to find the distance between them using various techniques" 5 | difficulty="beginner" 6 | tags=["vectors", "unstructureddata"] 7 | lesson_areas=["Python SDK", "AI"] 8 | icon="vector-circle" 9 | minimum_tier="standard" 10 | -------------------------------------------------------------------------------- /notebooks/getting-started-with-dataframes/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Getting Started with DataFrames in SingleStoreDB" 4 | description="""\ 5 | Data in SingleStoreDB can be queried and modified using a 6 | familiar DataFrame syntax in addition to SQL. 
7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["starter", "dataframe", "python"] 11 | lesson_areas=["Python SDK"] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/how-to-build-llm-apps-that-can-see-hear-speak/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="How to Build LLM Apps that can See Hear Speak" 4 | description="""\ 5 | Using OpenAI to build an app that can take images, audio, and text data to generate output 6 | """ 7 | icon="chart-network" 8 | difficulty="intermediate" 9 | tags=["advanced", "openai", "genai", "vectordb"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/integrating-with-pandas/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Integrating pandas with SingleStoreDB" 4 | description="""\ 5 | Moving data to and from pandas DataFrames is demonstrated using 6 | the SingleStoreDB Python library, SQLAlchemy, and Ibis. 
7 | """ 8 | icon="database" 9 | difficulty="beginner" 10 | tags=["starter", "dataframe", "python"] 11 | lesson_areas=["Python SDK"] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/load-csv-data-s3/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["chetan-thote"] 3 | title="Sales Data Analysis Dataset From Amazon S3" 4 | description="""\ 5 | The Sales Data Analysis use case demonstrates how to utilize SingleStore's powerful querying capabilities to analyze sales data stored in a CSV file.""" 6 | difficulty="beginner" 7 | tags=["starter", "loaddata", "s3"] 8 | lesson_areas=["Ingest"] 9 | icon="database" 10 | destinations=["spaces"] 11 | minimum_tier="free-shared" 12 | -------------------------------------------------------------------------------- /notebooks/load-csv-data-s3-placeholder/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["chetan-thote"] 3 | title="Importing Data from S3 into SingleStore using Pipelines" 4 | description="""\ 5 | This notebook demonstrates how to create a sample table in SingleStore, set up a pipeline to import data from an Amazon S3 bucket.""" 6 | difficulty="beginner" 7 | tags=["starter", "loaddata", "s3"] 8 | lesson_areas=["Ingest"] 9 | icon="database" 10 | destinations=["spaces"] 11 | minimum_tier="free-shared" 12 | -------------------------------------------------------------------------------- /notebooks/load-data-json/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["chetan-thote"] 3 | title="Employee Data Analysis JSON Dataset" 4 | description="""\ 5 | Employee Data Analysis use case illustrates how to leverage SingleStore's capabilities to process and analyze JSON data from a Amazon S3 data source. 
6 | """ 7 | difficulty="beginner" 8 | tags=["starter", "loaddata", "json"] 9 | lesson_areas=["Ingest"] 10 | icon="database" 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/network-intrusion-detection-part-2/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="IT Threat Detection, Part 2" 4 | description="""\ 5 | Part 2 or Real-time threat Detection - Validate the accuracy of the 6 | threat detection model with a test dataset""" 7 | icon="browser" 8 | difficulty="advanced" 9 | tags=["advanced", "cybersecurity", "vectordb", "iot", "ai"] 10 | lesson_areas=["AI", "Real-time"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/real-time-anomaly-detection/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Real-Time Anomaly Detection" 4 | description="""\ 5 | Real-time anomaly detection in IoT sensor data, harnessing the robust capabilities of SingleStoreDB and advanced analytical techniques. 
\ 6 | """ 7 | icon="chart-scatter" 8 | difficulty="intermediate" 9 | tags=["vectordb", "realtime"] 10 | lesson_areas=["AI", "Real-time"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/building-a-multi-agent-ai-app-with-autogen/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="How to Build a Multi-Agent AI App with AutoGen" 4 | description="""\ 5 | Learn how to build a multi-agent group chat with RAG using Autogen and SingleStore \ 6 | """ 7 | icon="vector-circle" 8 | difficulty="intermediate" 9 | tags=["starter", "autogen", "rag", "multiagent", "groupchat"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/cloud-functions-template/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Publish your first SingleStore Cloud function" 4 | description="""\ 5 | Learn how to connect to SingleStoreDB and perform basic\ 6 | CRUD operations and finally deploy these functions as callable API endpoints. 7 | """ 8 | icon="browser" 9 | difficulty="beginner" 10 | tags=["starter", "notebooks", "python"] 11 | lesson_areas=[] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/hybrid-full-text-vector-search/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Hybrid Full-text and Vector Search" 4 | description="Example of similarity search over vector data and a hybrid search that combines full-text search with an indexed vector search." 
5 | difficulty="intermediate" 6 | tags=["starter", "openai", "genai", "vectordb"] 7 | lesson_areas=["AI", "Query Performance"] 8 | destinations=["spaces"] 9 | icon="vector-circle" 10 | minimum_tier="free-shared" 11 | -------------------------------------------------------------------------------- /notebooks/resume-evaluator-ANN-index-search/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Resume Evaluator with Vector Index" 4 | description="""\ 5 | Resume Evaluator using vectors stored in SingleStore to find sutiable resume for your job description, with Indexed ANN search.\ 6 | """ 7 | icon="arrows-spin" 8 | difficulty="intermediate" 9 | tags=["vectordb", "genai", "openai", "rag"] 10 | lesson_areas=["AI"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/semantic-search-with-hugging-face/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Semantic Search with Hugging Face Models and Datasets" 4 | description="""\ 5 | Use Hugging Face to create embeddings and run semantic search \ 6 | using dot product in SingleStoreDB. \ 7 | """ 8 | icon="chart-network" 9 | difficulty="beginner" 10 | tags=["starter", "vectordb", "huggingface"] 11 | lesson_areas=["AI"] 12 | destinations=["spaces"] 13 | minimum_tier="free-shared" 14 | -------------------------------------------------------------------------------- /notebooks/pipelines-query-tuning/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Kafka Pipelines and Query Tuning" 4 | description="""\ 5 | Create a SingleStore pipeline to track the International Space Station and adjust queries & schema to optimize performance. 
6 | """ 7 | difficulty="advanced" 8 | tags=["beginner", "kafka", "pipeline", "querytuning"] 9 | lesson_areas=["Query Performance", "Ingest"] 10 | icon="database" 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/load-data-kakfa/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["chetan-thote"] 3 | title="Real-Time Event Monitoring Dataset From Kafka" 4 | description="""\ 5 | The Real-Time Event Monitoring use case illustrates how to leverage SingleStore's capabilities to process and analyze streaming data from a Kafka data source. 6 | """ 7 | difficulty="beginner" 8 | tags=["starter", "loaddata", "kafka"] 9 | lesson_areas=["Ingest"] 10 | icon="database" 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/evaluating-llms-with-uptrain/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Evaluating LLMs with Uptrain" 4 | description="""\ 5 | Using Uptrain to evaluate LLMs built with SingleStore as the contextual store. This notebook uses OpenAI embedding models and Langchain as a development framework. 
6 | """ 7 | difficulty="intermediate" 8 | tags=["openai", "llm", "vectordb", "langchain"] 9 | lesson_areas=["AI"] 10 | icon="vector-circle" 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/running-notebooks-from-another-notebook-with-fusion-sql/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Running Notebooks from Another Notebook with Fusion SQL" 4 | description="""\ 5 | Learn how to run Notebooks from another Notebook 6 | in SingleStoreDB Cloud using Fusion SQL. 7 | """ 8 | icon="files" 9 | difficulty="intermediate" 10 | tags=["notebooks", "jobs", "python", "fusion", "starter"] 11 | lesson_areas=["Python SDK"] 12 | destinations=["spaces"] 13 | minimum_tier="standard" 14 | -------------------------------------------------------------------------------- /notebooks/searching-all-of-wikipedia/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title = "Searching all of Wikipedia" 4 | description = """\ 5 | Simulate searching through 10 million paragraphs in Wikipedia for information about video games, and generate answers using RAG! 
\ 6 | """ 7 | icon = "book-open-cover" 8 | difficulty="intermediate" 9 | tags = ["vector", "advanced", "vectordb", "hybridsearch", "rag", "ann"] 10 | lesson_areas=["AI"] 11 | destinations = ["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/getting-started-with-fusion-sql/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Getting Started with Fusion SQL" 4 | description="""\ 5 | Fusion SQL allows you to manage your SingleStoreDB Cloud 6 | resources such as workspace groups, workspaces, and 7 | Stage files all from SQL. 8 | """ 9 | icon="browser" 10 | difficulty="beginner" 11 | tags=["starter", "fusion", "python"] 12 | lesson_areas=["Data Management", "Python SDK"] 13 | destinations=["spaces"] 14 | minimum_tier="standard" 15 | -------------------------------------------------------------------------------- /notebooks/confluent-cloud-integration/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title = "Ingest data from Confluent Cloud (Kafka)" 4 | description = """ 5 | A step-by-step guide on seamlessly ingesting data from Confluent Cloud (Kafka) into your SingleStoreDB database using a robust pipeline. 
""" 6 | icon = "confluent-logo" 7 | difficulty="intermediate" 8 | tags = ["advanced", "confluent", "kafka", "pipeline", "realtime"] 9 | lesson_areas=["Integrations"] 10 | destinations = ["spaces"] 11 | minimum_tier="standard" 12 | -------------------------------------------------------------------------------- /notebooks/optimize-performance-with-tpch-100/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Learn how to Optimize Performance with TPCH 100" 4 | description="""\ 5 | This notebook will help you understand how you can take advantage of SingleStoreDB distributed capability using TPCH-100. 6 | """ 7 | icon="database" 8 | difficulty="advanced" 9 | tags=["advanced", "performance", "tuning", "shardkey", "ingest", "tpch"] 10 | lesson_areas=["Query Performance"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/network-intrusion-detection-part-3/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="IT Threat Detection, Part 3" 4 | description=""" 5 | Part 3 of Real-time threat Detection - Integrate with Kafka, run 6 | and visualize Threat Detection on incoming logs. 7 | This notebook requires adjustments to work out of the box.""" 8 | icon="browser" 9 | difficulty="advanced" 10 | tags=["advanced", "cybersecurity", "vectordb", "iot", "ai"] 11 | lesson_areas=["AI", "Real-time"] 12 | destinations=["spaces"] 13 | minimum_tier="standard" 14 | -------------------------------------------------------------------------------- /lessons/it-threat-detection.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | title = "IT Threat Detection" 3 | description="""\ 4 | Introducing the IT Threat Detection Module! 
Over three concise lessons, you'll master the art of fortifying your organization's cybersecurity. Learn to identify threats, validate defenses, and seamlessly integrate protective measures. Get set to elevate your digital security prowess! 5 | """ 6 | notebooks = [ 7 | "network-intrusion-detection-part-1", 8 | "network-intrusion-detection-part-2", 9 | "network-intrusion-detection-part-3" 10 | ] 11 | -------------------------------------------------------------------------------- /notebooks/ingest-pdfs-with-pdfplumber/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Ask questions of your PDFs with PDFPlumber" 4 | description="Ask questions of your unstructured PDFs. In this notebook, PDFPlumber ingests pdfs, then Open AI is used to create embeddings, the vector data is stored in SingleStore and finally ask questions of your PDF data" 5 | icon="file-export" 6 | difficulty="beginner" 7 | tags=["ingest", "pdf","vector","pdfplumber"] 8 | lesson_areas=["AI", "Integrations"] 9 | destinations=["spaces"] 10 | minimum_tier="standard" 11 | -------------------------------------------------------------------------------- /notebooks/singlestore-april-challenge-haiku-ascii/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="SingleStore April Challenge: Haiku ASCII" 4 | description="""\ 5 | SingleStore is hosting a challenge inviting participants to craft a unique Haiku or create captivating ASCII art using SingleStore Notebooks. The most creative masterpiece wins a set of Meta RayBan Smart sunglasses and a $500 AWS gift card! 
\ 6 | """ 7 | icon="chart-network" 8 | difficulty="intermediate" 9 | tags=["challenge", "haiku", "ascii"] 10 | lesson_areas=[] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /notebooks/rag-with-bedrock/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Retrieval Augmented Question & Answering with Amazon Bedrock" 4 | description="""\ 5 | Infuse domain-specific knowledge from SingleStoreDB into generative AI models on Amazon Bedrock to showcase Question & Answering using RAG (retrieval-augmented generation) to improve the quality of responses.\ 6 | """ 7 | icon="crystal-ball" 8 | difficulty="intermediate" 9 | tags=["vectordb", "genai", "bedrock", "rag", "starter"] 10 | lesson_areas=["AI", "Integrations"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /notebooks/real-time-recommendation-engine/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Real Time Recommendation Engine" 4 | description="""\ 5 | We demonstrate how to build and host a real-time recommendation engine for free with SingleStore. 
The notebook also leverages our new SingleStore Job Service to ensure that the latest data is ingested and used in providing recommendations.\ 6 | """ 7 | icon="crystal-ball" 8 | difficulty="intermediate" 9 | tags=["starter", "openai", "vercel", "realtime", "vectordb"] 10 | lesson_areas=["AI", "Real-time"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /.github/workflows/deploy-bifrost.yml: -------------------------------------------------------------------------------- 1 | name: Trigger bifrost deployment 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | deploy: 10 | runs-on: ubuntu-latest 11 | 12 | steps: 13 | - name: Trigger gitlab pipeline 14 | env: 15 | GITLAB_API_TOKEN: ${{ secrets.GITLAB_API_TOKEN }} 16 | run: | 17 | curl --request POST \ 18 | --form token=$GITLAB_API_TOKEN \ 19 | --form ref=master \ 20 | --form variables[SPACES_TRIGGER]="production" \ 21 | https://gitlab.com/api/v4/projects/33388494/ref/master/trigger/pipeline/ 22 | -------------------------------------------------------------------------------- /meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors = [ 3 | {name="Tim Green", email="tgreen@singlestore.com"}, 4 | {name="Kevin Smith", email="ksmith@singlestore.com"}, 5 | ] 6 | 7 | [meta.urls] 8 | homepage = "https://singlestore.com" 9 | documentation = "https://docs.singlestore.com" 10 | 11 | [samples] 12 | display = [ 13 | "getting-started-with-notebooks", 14 | "notebook-basics", 15 | "getting-started-with-dataframes", 16 | "image-matching-with-sql", 17 | "movie-recommendation", 18 | "semantic-search-with-hugging-face", 19 | "semantic-search-with-openai-qa", 20 | "semantic-search-with-openai-embedding-creation", 21 | "atlas-and-kai", 22 | ] 23 | -------------------------------------------------------------------------------- 
/notebooks/semantic-visualization-and-vector-datatype/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Semantic Visualization and Vector Datatype" 4 | description="""\ 5 | SingleStoreDB leverages vector database processing to enable storage and search of vector data, allowing for semantic similarity-based searches to find content based on meaning rather than exact matches, such as locating related words like "happy" and "joyful." \ 6 | """ 7 | icon="chart-network" 8 | difficulty="beginner" 9 | tags=["starter", "vectordb", "genai", "huggingface", "semanticsearch"] 10 | lesson_areas=["AI", "Python SDK"] 11 | destinations=["spaces"] 12 | minimum_tier="free-shared" 13 | -------------------------------------------------------------------------------- /.github/workflows/pre-commit.yml: -------------------------------------------------------------------------------- 1 | name: pre-commit checks 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | 9 | jobs: 10 | check: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v3 15 | 16 | - name: Set up Python 17 | uses: actions/setup-python@v4 18 | with: 19 | python-version: "3.11" 20 | 21 | - name: Install dependencies 22 | run: | 23 | python -m pip install --upgrade pip 24 | pip install pre-commit==3.7 25 | 26 | - name: Analysing the code with pre-commit checks 27 | run: | 28 | pre-commit run --all-files 29 | -------------------------------------------------------------------------------- /notebooks/network-intrusion-detection-part-1/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="IT Threat Detection, Part 1" 4 | description=""" 5 | Part 1 of Real-time threat Detection - This notebook demonstrates the 6 | application of SingleStoreDB's similarity search to create a system 7 | for identifying infrequent 
occurrences, a common requirement in fields such as cybersecurity 8 | and fraud detection where only a small percentage of events are potentially malicious.""" 9 | icon="browser" 10 | difficulty="advanced" 11 | tags=["advanced", "cybersecurity", "vectordb", "iot", "ai"] 12 | lesson_areas=["AI", "Real-time"] 13 | destinations=["spaces"] 14 | minimum_tier="standard" 15 | -------------------------------------------------------------------------------- /notebooks/inserting-embeddings-from-multiple-models-into-singlestore-using-external-functions/meta.toml: -------------------------------------------------------------------------------- 1 | [meta] 2 | authors=["singlestore"] 3 | title="Inserting embeddings from multiple models into SingleStore Using External Functions" 4 | description="""\ 5 | Discover the power of SingleStoreDB's external functions to dynamically fetch and store vector embeddings from leading AI models into your database. This demo highlights leveraging SingleStore's robust vector data type and external functions for efficient management and analysis of machine learning embeddings.\ 6 | """ 7 | icon="vector-circle" 8 | difficulty="intermediate" 9 | tags=["vectordb", "genai", "openai", "rag", "huggingface"] 10 | lesson_areas=["AI", "Data Management"] 11 | destinations=["spaces"] 12 | minimum_tier="standard" 13 | -------------------------------------------------------------------------------- /resources/lesson-check.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import sys 4 | import tomllib 5 | 6 | 7 | def check_lesson(lesson_path): 8 | print('Checking ' + lesson_path) 9 | 10 | with open(lesson_path, 'rb') as f: 11 | meta = tomllib.load(f) 12 | 13 | notebooks = meta['meta']['notebooks'] 14 | 15 | for notebook in notebooks: 16 | notebook_path = os.path.join( 17 | 'notebooks', 18 | notebook, 19 | 'notebook.ipynb', 20 | ) 21 | 22 | if not os.path.isfile(notebook_path): 23 | 
print( 24 | f'error: notebook file does not exist at {notebook_path}', 25 | file=sys.stderr, 26 | ) 27 | sys.exit(1) 28 | 29 | 30 | if __name__ == '__main__': 31 | for f in sys.argv[1:]: 32 | check_lesson(f) 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ## Editor temporary/working/backup files 2 | .#* 3 | *\#*\# 4 | [#]*# 5 | *~ 6 | *$ 7 | *.bak 8 | *.old 9 | *flymake* 10 | *.kdev4 11 | *.log 12 | *.nfs* 13 | *.swo 14 | *.swp 15 | *.pdb 16 | .project 17 | .pydevproject 18 | .settings 19 | .idea 20 | .vagrant 21 | .noseids 22 | .ipynb_checkpoints 23 | .tags 24 | .vscode 25 | 26 | ## Compiled source 27 | *.com 28 | *.class 29 | *.dll 30 | *.dylib 31 | *.exe 32 | *.o 33 | *.py[ocd] 34 | *.so 35 | *.na 36 | .build_cache_dir 37 | MANIFEST 38 | 39 | ## Python files 40 | # setup.py working directory 41 | build 42 | # sphinx build directory 43 | docs/src/_build 44 | # setup.py dist directory 45 | dist 46 | # Egg metadata 47 | *.egg-info 48 | .eggs 49 | .pypirc 50 | 51 | ## tox testing tool 52 | .tox 53 | # rope 54 | .ropeproject 55 | # wheel files 56 | *.whl 57 | **/wheelhouse/* 58 | # coverage 59 | .coverage 60 | swat.egg-info/ 61 | __pycache__/ 62 | _stats.txt 63 | cover/ 64 | test-reports/ 65 | 66 | ## OS generated files 67 | .directory 68 | .gdb_history 69 | .DS_Store 70 | ehthumbs.db 71 | Icon? 
72 | Thumbs.db 73 | 74 | ## Documentation generated files 75 | docs/src/_build/doctrees 76 | docs/src/generated 77 | 78 | dask-worker-space 79 | 80 | ## Test configuration 81 | singlestoredb/mysql/tests/databases.json 82 | -------------------------------------------------------------------------------- /notebooks/insure-gpt-demo/assets/insurance_claim.txt: -------------------------------------------------------------------------------- 1 | 2 | Insurance Claim Document 3 | 4 | Policyholder Information: 5 | Name: John Doe 6 | Address: 123 Main St, Anytown, AN 12345 7 | Contact Information: (555) 123-4567 / johndoe@email.com 8 | Policy Number: POL-125367 9 | 10 | Vehicle Information: 11 | Make: Honda 12 | Model: Civic 13 | Year: 2010 14 | VIN: 4576227 15 | License Plate Number: 4674XYZ 16 | 17 | Incident Information: 18 | Date and Time of the Accident: 2024-01-29 14:09:04 19 | Location of the Accident: Intersection of 5th and Main 20 | Brief Description of the Accident: Collision with a tree on the right side of the road. 21 | Weather and Road Conditions: Clear weather, dry road 22 | Police Report Number: REP-68865 23 | Other Involved Parties or Witnesses: None 24 | 25 | Damage Information and Repair Estimate: 26 | - Front Bumper: $300 USD 27 | - Hood: $450 USD 28 | - Front Grille: $200 USD 29 | - Headlights: $250 USD 30 | - Radiator: $500 USD 31 | - Engine Repair: $1500 USD 32 | - Front Fenders: $350 USD 33 | - Windshield: $300 USD 34 | 35 | Total Estimated Repair Cost: $3850 USD 36 | 37 | Declaration: 38 | I hereby declare that the information provided above is accurate to the best of my knowledge and belief. 
39 | 40 | Signature: _______________________________ 41 | 42 | Date: [Date] 43 | -------------------------------------------------------------------------------- /resources/author-check.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import re 4 | import sys 5 | import tomllib 6 | 7 | 8 | def error(msg): 9 | print('ERROR:', msg, file=sys.stderr) 10 | sys.exit(1) 11 | 12 | 13 | def check_author(author_path): 14 | print(f'Checking {author_path}...') 15 | 16 | with open(author_path, 'rb') as f: 17 | meta = tomllib.load(f) 18 | 19 | if 'name' not in meta: 20 | error(f'No `name` in `meta` section of {author_path}') 21 | 22 | if 'title' not in meta: 23 | error(f'No `title` in `meta` section of {author_path}') 24 | 25 | if 'external' not in meta: 26 | error(f'No `external` in `meta` section of {author_path}') 27 | 28 | # Image is optional, but if defined a corresponding image must exist 29 | # Image can either be a URL or a filename in common/images/author-images 30 | if 'image' in meta: 31 | img_reference = meta['image'] 32 | is_url = bool(re.match(r'^https?://', img_reference)) 33 | 34 | if (not is_url): 35 | img_filename = f'{img_reference}.png' 36 | img_path = os.path.join('common/images/author-images', img_filename) 37 | if not os.path.isfile(img_path): 38 | error(f'Author image does not exist at {img_path} for {author_path}') 39 | 40 | 41 | if __name__ == '__main__': 42 | for f in sys.argv[1:]: 43 | check_author(f) 44 | -------------------------------------------------------------------------------- /notebooks/launch-open-source-apps-with-langchain/README.md: -------------------------------------------------------------------------------- 1 | # LangChain Lift-Off: Launching Open-Source Apps on Private Network 2 | 3 | Get ready for "LangChain Lift-off: Launch Your Open Source GPT Apps Today". 
This is the future of AI, where internal data including documents, wikis, code, and meeting notes are at your AI's fingertips. Our ideal tool for the job, LangChain, can store vector data, perform semantic searches, and pull data from various sources without extensive ETL. 4 | 5 | Join us on June 22nd for an exclusive webinar featuring Akmal Chaudhri. This event is perfect for developers, data engineers, and anyone interested in building intelligent GPT applications. 6 | 7 | Don't miss this chance to learn from the expert and gain valuable insights on how to build a GPT App on an open-source stack using LangChain. Register now! 8 | 9 | ## Discussion Topics 10 | 11 | - Dive deep into building a GPT App using LangChain, with hands-on examples and live coding. 12 | - Uncover LangChain’s native support for efficient vector functions to power Generative AI with simple SQL queries. 13 | - Absorb practical techniques and strategies for building intelligent GPT applications. 14 | - Delve into the power of LangChain's scalable, distributed architecture and OpenAI's advanced machine learning models for GPT. 15 | 16 | ## Free Trial! 17 | Don't forget to try this for yourself by signing up for a [free trial of SingleStoreDB](https://www.singlestore.com/cloud-trial/?utm_campaign=7014X000002edsdQAA&utm_medium=webinar&utm_source=singlestore&utm_content=webinar-github) today! 
18 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: https://github.com/pre-commit/pre-commit-hooks 3 | rev: v4.1.0 4 | hooks: 5 | - id: trailing-whitespace 6 | - id: end-of-file-fixer 7 | - id: check-docstring-first 8 | - id: check-json 9 | # - id: check-added-large-files 10 | - id: check-yaml 11 | - id: debug-statements 12 | # - id: name-tests-test 13 | - id: double-quote-string-fixer 14 | - id: requirements-txt-fixer 15 | - repo: https://github.com/PyCQA/flake8 16 | rev: 7.1.1 17 | hooks: 18 | - id: flake8 19 | additional_dependencies: [flake8-typing-imports==1.12.0] 20 | - repo: https://github.com/hhatto/autopep8 21 | rev: v2.3.2 22 | hooks: 23 | - id: autopep8 24 | - repo: https://github.com/asottile/reorder_python_imports 25 | rev: v2.6.0 26 | hooks: 27 | - id: reorder-python-imports 28 | args: [--py36-plus] 29 | - repo: https://github.com/asottile/add-trailing-comma 30 | rev: v2.2.1 31 | hooks: 32 | - id: add-trailing-comma 33 | args: [--py36-plus] 34 | - repo: https://github.com/asottile/setup-cfg-fmt 35 | rev: v1.20.0 36 | hooks: 37 | - id: setup-cfg-fmt 38 | - repo: https://github.com/pre-commit/mirrors-mypy 39 | rev: v1.11.1 40 | hooks: 41 | - id: mypy 42 | - repo: local 43 | hooks: 44 | - id: nb-check 45 | name: nb-check 46 | entry: resources/nb-check.py 47 | language: python 48 | files: \.ipynb$ 49 | exclude: notebooks/notebook-style-guide/notebook.ipynb 50 | additional_dependencies: [nbformat==5.10.4] 51 | - id: nb-meta-check 52 | name: nb-meta-check 53 | entry: resources/nb-meta-check.py 54 | language: system 55 | files: notebooks/.*/meta\.toml 56 | - id: lesson-check 57 | name: lesson-check 58 | entry: resources/lesson-check.py 59 | language: system 60 | files: lessons/.*\.toml 61 | - id: author-check 62 | name: author-check 63 | entry: resources/author-check.py 64 | language: system 65 | 
files: authors/.*\.toml 66 | -------------------------------------------------------------------------------- /.github/workflows/release-samples.yml: -------------------------------------------------------------------------------- 1 | name: Upload Release Assets 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | 12 | # Needed for creating tag 13 | permissions: 14 | contents: write 15 | 16 | steps: 17 | - name: Checkout code 18 | uses: actions/checkout@v3 19 | 20 | - name: Set up Python 3.11 21 | uses: actions/setup-python@v5 22 | with: 23 | python-version: "3.11" 24 | 25 | - name: Install dependencies 26 | run: | 27 | python -m pip install --upgrade pip 28 | 29 | - name: Set current date as env variable 30 | run: echo "DATETIME=$(date +'%Y-%m-%dT%H-%M-%S')" >> $GITHUB_ENV 31 | 32 | # - name: Bump version and push tag 33 | # id: tag_version 34 | # uses: mathieudutour/github-tag-action@v6.2 35 | # with: 36 | # custom_tag: v${{ env.DATETIME }} 37 | # tag_prefix: "" 38 | # github_token: ${{ secrets.GITHUB_TOKEN }} 39 | # run: 40 | # git config --global user.name "${GITHUB_ACTOR}" 41 | # git config --global user.email "${GITHUB_ACTOR}@users.noreply.github.com" 42 | # git tag "v${{ env.DATETIME }}" 43 | # git push origin "v${{ env.DATETIME }}" 44 | 45 | - name: Bump version and push tag 46 | uses: actions/github-script@v7 47 | with: 48 | script: | 49 | github.rest.git.createRef({ 50 | owner: context.repo.owner, 51 | repo: context.repo.repo, 52 | ref: "refs/tags/v${{ env.DATETIME }}", 53 | sha: context.sha 54 | }) 55 | 56 | - name: Build notebooks 57 | run: | 58 | python resources/package-samples.py notebooks --strip-output --outfile notebooks-stripped.zip --notebooks all 59 | python resources/package-samples.py notebooks --outfile notebooks-full.zip --notebooks all 60 | 61 | - name: Create Release and Upload Assets 62 | env: 63 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 64 | run: | 65 | gh release create "v${{ 
env.DATETIME }}" \ 66 | notebooks-stripped.zip \ 67 | notebooks-full.zip \ 68 | --title "Release v${{ env.DATETIME }}" \ 69 | --notes "Automated release of notebook samples" 70 | -------------------------------------------------------------------------------- /notebooks/performance-troubleshooting/assets/DB_PERFORMANCE_TROUBLESHOOT_QUERIES.csv: -------------------------------------------------------------------------------- 1 | QueryID,QueryName,QueryTxt 2 | 1,Plan Warnings,"SELECT plan_id, plan_warnings 3 | FROM information_schema.plancache 4 | WHERE plan_warnings LIKE '%mismatched datatypes%' ; " 5 | 2,Shard and Sort Key Summary,"WITH shard_sort AS (SELECT t.TABLE_SCHEMA, t.TABLE_NAME, 6 | GROUP_CONCAT(CASE WHEN s.INDEX_TYPE = 'SHARD' THEN s.COLUMN_NAME END) AS SHARD_KEY, 7 | GROUP_CONCAT(CASE WHEN s.INDEX_TYPE='CLUSTERED COLUMNSTORE' THEN s.COLUMN_NAME END) AS SORT_KEY, 8 | t.STORAGE_TYPE 9 | FROM information_schema.tables t LEFT JOIN information_schema.STATISTICS s ON (t.TABLE_SCHEMA=s.TABLE_SCHEMA AND t.TABLE_NAME=s.TABLE_NAME) 10 | WHERE t.table_schema<>'information_schema' 11 | AND t.table_type not like '%VIEW%' 12 | GROUP BY t.TABLE_SCHEMA, t.TABLE_NAME, t.STORAGE_TYPE 13 | ORDER BY t.TABLE_SCHEMA, t.TABLE_NAME,t.STORAGE_TYPE) 14 | SELECT s.TABLE_SCHEMA,s.TABLE_NAME, s.SHARD_KEY, s.SORT_KEY,s.STORAGE_TYPE, 15 | (CASE WHEN PARTITION_TYPE = 'Reference' THEN 'REFERENCE TABLE' ELSE 'DISTRIBUTED TABLE' END) AS TABLE_TYPE, 16 | SUM(t.ROWS) AS TOTAL_ROWS, 17 | MIN(t.ROWS) as MIN_ROWS_PER_PARTITION, 18 | max(t.ROWS) as MAX_ROWS_PER_PARTITION, 19 | FLOOR(AVG(t.ROWS)) AS AVG_ROWS_PER_PARTITION, 20 | ROUND(STDDEV(ROWS) / AVG(t.ROWS), 3) * 100 AS ROW_SKEW, 21 | MIN(memory_use / 1024 / 1024) as MIN_MEMORY_MB_PER_PARTITION, 22 | MAX(memory_use / 1024 / 1024) as MAX_MEMORY_MB_PER_PARTITION, 23 | FLOOR(AVG(MEMORY_USE / 1024 / 1024)) AS AVG_MEMORY_MB_PER_PARTITION, 24 | ifnull(ROUND(STDDEV(MEMORY_USE) / AVG(MEMORY_USE), 3) * 100, 0) AS MEMORY_SKEW 25 | FROM shard_sort s 
INNER JOIN information_schema.TABLE_STATISTICS t ON (s.TABLE_SCHEMA=t.DATABASE_NAME AND s.TABLE_NAME=t.TABLE_NAME) 26 | WHERE t.PARTITION_TYPE IN ('Master','Reference') 27 | GROUP BY s.TABLE_SCHEMA,s.TABLE_NAME, s.SHARD_KEY, s.SORT_KEY,s.STORAGE_TYPE, (CASE WHEN PARTITION_TYPE = 'Reference' THEN 'REFERENCE TABLE' ELSE 'DISTRIBUTED TABLE' END) 28 | ORDER BY s.TABLE_NAME" 29 | 3,Pipeline Lag,"select DATABASE_NAME as db, 30 | PIPELINE_NAME as pipeline, 31 | SOURCE_PARTITION_ID as partition_with_max_lag, 32 | max(LATEST_OFFSET - CURSOR_OFFSET) as lag_val 33 | from information_schema.PIPELINES_CURSORS 34 | group by DATABASE_NAME, 35 | PIPELINE_NAME 36 | ;" 37 | 4,Query to track Queries with repartitioning or broadcasting operations,"SELECT mqcu.activity_name, 38 | mqcu.table_name, 39 | mqa.query_text, 40 | mqa.database_name, 41 | mqcu.reshuffles 42 | FROM information_schema.mv_query_column_usage mqcu 43 | JOIN information_schema.mv_query_activities mqa 44 | ON mqcu.activity_name = mqa.activity_name 45 | AND mqcu.reshuffles > 10 46 | AND mqcu.activity_name NOT LIKE 'RunPipeline%' 47 | AND mqa.database_name <> 'information_schema' ;" 48 | 5,Duplicate Columns in Database,"WITH ColumnDetails AS ( SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, COLUMN_TYPE, ROW_NUMBER() OVER(PARTITION BY COLUMN_NAME ORDER BY TABLE_NAME) AS COLUMN_INDEX 49 | FROM information_schema.columns where table_schema <> 'information_schema') SELECT TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, COLUMN_TYPE , COLUMN_INDEX 50 | FROM ColumnDetails WHERE COLUMN_INDEX > 1 ORDER BY TABLE_SCHEMA, TABLE_NAME, COLUMN_NAME, COLUMN_TYPE;" 51 | -------------------------------------------------------------------------------- /notebooks/inserting-embeddings-from-multiple-models-into-singlestore-using-external-functions/external_function_api.py: -------------------------------------------------------------------------------- 1 | import json 2 | import time 3 | from concurrent.futures import ThreadPoolExecutor 4 | 5 | import 
openai 6 | import torch 7 | from flask import Flask 8 | from flask import request 9 | from openai import OpenAI 10 | from transformers import AutoModel 11 | from transformers import AutoTokenizer 12 | 13 | # Set up OpenAI 14 | api_key = 'add your openai key' 15 | client = OpenAI(api_key=api_key) 16 | 17 | # Load Hugging Face model 18 | model_name = 'sentence-transformers/paraphrase-multilingual-MiniLM-L12-v2' 19 | hf_model = AutoModel.from_pretrained(model_name) 20 | hf_tokenizer = AutoTokenizer.from_pretrained(model_name) 21 | 22 | # Hugging Face embedding function 23 | 24 | 25 | def get_hf_embedding(texts): 26 | embeddings = [] 27 | for text in texts: 28 | inputs = hf_tokenizer(text, padding=True, truncation=True, return_tensors='pt') 29 | with torch.no_grad(): 30 | embedding = hf_model(**inputs).last_hidden_state.mean(dim=1) 31 | norm = torch.linalg.vector_norm(embedding, ord=2, dim=1, keepdim=True) 32 | normalized_embedding = embedding / norm 33 | embeddings.append(normalized_embedding.squeeze().tolist()) 34 | return embeddings 35 | 36 | # OpenAI embedding function 37 | 38 | 39 | def get_ada_002_embedding(texts, model='text-embedding-ada-002'): 40 | responses = openai.embeddings.create(input=texts, model=model) 41 | return [response.embedding for response in responses.data] 42 | 43 | 44 | def process_batch(batch, model_name): 45 | texts = [text for text in batch if isinstance(text, str) and text.strip()] 46 | if not texts: 47 | return [] 48 | if model_name == 'openai_embedding': 49 | try: 50 | return get_ada_002_embedding(texts, 'text-embedding-ada-002') 51 | except Exception as e: 52 | print(f'Error in OpenAI processing: {e}') 53 | return [] 54 | elif model_name == 'hf_embedding': 55 | return get_hf_embedding(texts) 56 | else: 57 | print(f'Invalid model name: {model_name}') 58 | return [] 59 | 60 | 61 | app = Flask(__name__) 62 | 63 | 64 | @app.route('/functions/get_embedding', methods=['POST']) 65 | def get_embedding(): 66 | """ incoming data is this format : 
67 | {"data": 68 | [[, , ], 69 | [, , ], 70 | ... ]} 71 | """ 72 | start_time = time.time() 73 | row_ids, args, model_names = [], [], [] 74 | for row_id, data, model_name in request.json['data']: 75 | row_ids.append(row_id) 76 | args.append(data) 77 | model_names.append(model_name) 78 | 79 | batch_size = 1024 80 | futures = [] 81 | with ThreadPoolExecutor(max_workers=len(args) // batch_size) as executor: 82 | for i in range(0, len(args), batch_size): 83 | batch = args[i:i + batch_size] 84 | # Assuming all texts in the batch use the same model 85 | model_name = model_names[i] 86 | futures.append(executor.submit(process_batch, batch, model_name)) 87 | 88 | flat_results = [future.result() for future in futures] 89 | time_taken = time.time() - start_time 90 | app.logger.info(f'Time taken: {time_taken} seconds') 91 | res = map(json.dumps, flat_results) 92 | return dict(data=list(zip(row_ids, res))) 93 | 94 | 95 | if __name__ == '__main__': 96 | app.run(debug=True, host='0.0.0.0', port=5000) 97 | -------------------------------------------------------------------------------- /resources/nb-meta-check.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | import os 3 | import re 4 | import sys 5 | import tomllib 6 | 7 | 8 | def kebab_case(string): 9 | # Naive implementation of kebab case to find icon names from lesson areas 10 | return re.sub(r'[^a-zA-Z0-9]+', '-', string.strip()).lower() 11 | 12 | 13 | def error(msg): 14 | print('ERROR:', msg, file=sys.stderr) 15 | sys.exit(1) 16 | 17 | 18 | for f in sys.argv[1:]: 19 | 20 | with open(f, 'r') as infile: 21 | info = tomllib.loads(infile.read()) 22 | 23 | if 'meta' not in info: 24 | error(f'No `meta` section in `{f}`') 25 | 26 | # The meta section requires, title, description, and icon 27 | meta = info['meta'] 28 | 29 | if 'title' not in meta: 30 | error(f'No `title` in `meta` section of {f}') 31 | 32 | if 'description' not in meta: 33 | error(f'No `description` 
in `meta` section of {f}') 34 | 35 | # Authors must be a non-empty list 36 | if ( 37 | 'authors' not in meta 38 | or not isinstance(meta['authors'], list) 39 | or not meta['authors'] 40 | ): 41 | error(f'No `authors` in `meta` section of {f}') 42 | 43 | if 'icon' not in meta: 44 | error(f'No `icon` in `meta` section of {f}') 45 | 46 | if 'minimum_tier' not in meta: 47 | error( 48 | f'No `minimum_tier` in `meta` section of {f}; ' 49 | f'it must be set to "free-shared" or "standard"', 50 | ) 51 | 52 | if meta['minimum_tier'] not in ['free-shared', 'standard']: 53 | error( 54 | f'`minimum_tier` in `meta` section of {f} ' 55 | f'must be set to "free-shared" or "standard"', 56 | ) 57 | 58 | if 'lesson_areas' not in meta: 59 | error( 60 | f'No `lesson_areas` in `meta` section of {f}; ' 61 | f'it must be an array of strings (can be empty)', 62 | ) 63 | 64 | if not isinstance(meta['lesson_areas'], list): 65 | error( 66 | f'`lesson_areas` in `meta` section of {f} must be a list', 67 | ) 68 | 69 | # Tags must be all lower-case, ascii letters 70 | tags = meta.get('tags', []) 71 | 72 | if [x.lower() for x in tags] != tags: 73 | error(f'Tags must be in all lower-case ({tags}) in {f}') 74 | 75 | if [re.sub(r'[^a-z0-9]', r'', x) for x in tags] != tags: 76 | error(f'Tags can only contain letters and numbers ({tags}) in {f}') 77 | 78 | if len(tags) != len(set(tags)): 79 | error(f'Duplicate tag found ({tags}) in {f}') 80 | 81 | # Currently only "spaces" is allowed in destinations 82 | destinations = meta.get('destinations', []) 83 | 84 | if destinations and [x for x in destinations if x != 'spaces']: 85 | error(f'Only "spaces" is allowed in `destinations` in {f}') 86 | 87 | card_icons = os.listdir('common/images/card-header-icons') 88 | preview_icons = os.listdir('common/images/preview-header-icons') 89 | 90 | for lesson_area in meta['lesson_areas']: 91 | expected_icon_name = f'{kebab_case(lesson_area)}.png' 92 | if expected_icon_name not in card_icons: 93 | error(f'Lesson area 
{lesson_area} not found in card icons') 94 | if expected_icon_name not in preview_icons: 95 | error(f'Lesson area {lesson_area} not found in preview icons') 96 | 97 | # Authors must have a corresponding author entry 98 | author_meta_files = os.listdir('authors') 99 | for author in meta['authors']: 100 | author_filename = f'{author}.toml' 101 | if author_filename not in author_meta_files: 102 | error(f'Author {author} does not have a corresponding author entry in {f}') 103 | -------------------------------------------------------------------------------- /notebooks/performance-troubleshooting/assets/templates/index.template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | SingleStore DB Performance Troubleshoot Summary 7 | 13 | 14 | 15 | 24 | 25 | 39 | 53 | 54 |
34 | 38 | 48 | 52 |
55 | 56 |
57 |

58 | SingleStore Notebook to identify common performance Issues 59 |

60 | 61 |
    62 |
  • Query information is loaded from a CSV file exposed on a public URL
  • 63 |
  • Results are exported into paginated HTML tables
  • 64 |
  • 65 | Archive of generated html files is uploaded to SingleStore Stage using 66 | SingleStore Python Client 67 |
  • 68 |
69 |
70 | 71 |

72 | Results Index 73 |

74 | 75 |
76 | 77 | 78 | 84 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 110 | 111 | 112 | 113 | 117 | 118 | 119 | 120 |
82 | Query ID 83 | 88 | Query Name 89 |
1Plan Warnings
2Shard and Sort Key Summary
3Pipeline Lag
4 107 | Queries with repartitioning or broadcasting operations 109 |
5 114 | Duplicate Columns 116 |
121 |
122 | 123 | 128 | 129 | 130 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # SingleStore Spaces Contributing Guide 2 | 3 | First, please fork this repo and commit your changes to the forked repo. From there make a Pull Request with your notebook submission keeping the following in mind: 4 | 5 | 6 | ## Key Requirements 7 | 8 | First, you should check whether this notebook can be run using a Free Shared Tier instance. This is ideal for scenarios requiring few tables (<10), pipelines (<10) and less than 1GB of compressed storage. See the [limitations](https://docs.singlestore.com/cloud/shared-edition/) to determine if your notebook can be run on a Free Shared Tier database. 9 | 10 | If your database can be run using the Free Shared Tier (Starter Workspace), you must: 11 | 1. Mention this using a Markdown cell at the top of the notebook. See [example](https://www.singlestore.com/spaces/mongo-atlas-single-store-kai/). You should explicitly state that this notebook can run on both the Starter and Standard Workspaces. 12 | 2. Add "starter" to the tags in the meta.toml file below. 13 | 3. Use the following syntax when creating OR dropping your database. NOTE: When you create a Starter Workspace, a database is automatically created for it. Starter Workspaces are limited to one database. The only way you can drop the auto-created database linked to the Starter Workspace is by terminating the Workspace altogether. 14 | 15 | ```python 16 | shared_tier_check = %sql show variables like 'is_shared_tier' 17 | if not shared_tier_check or shared_tier_check[0][1] == 'OFF': 18 | %sql DROP DATABASE IF EXISTS your_database_name; 19 | %sql CREATE DATABASE your_database_name 20 | ``` 21 | 22 | 4. At the end of the notebook, you should do the same check when cleaning up any databases created. 
23 | 24 | ```python 25 | shared_tier_check = %sql show variables like 'is_shared_tier' 26 | if not shared_tier_check or shared_tier_check[0][1] == 'OFF': 27 | %sql DROP DATABASE IF EXISTS your_database_name; 28 | ``` 29 | 30 | 31 | If your database can only be run using Standard workspaces, you must: 32 | 1. Mention this using a Markdown cell at the top of the notebook. See [example](https://www.singlestore.com/spaces/ingest-data-from-confluent-cloud-kafka/). You should explicitly state that this notebook can run only on Standard Workspaces. 33 | 2. Add "advanced" to the tags in the meta.toml file below. 34 | 35 | 36 | 37 | ## File structure 38 | 39 | To add a new space you should create a new folder inside `/notebooks`. 40 | 41 | Here are some requirements for the file structure: 42 | 43 | 1. Folder name must use `kebab-case` 44 | 2. Folder must contain a Jupyter Notebook called `notebook.ipynb` 45 | 3. Folder must contain a `meta.toml` file which holds information about your SingleStore Space. See below for the structure of this file. 46 | 47 | ### `meta.toml` file 48 | 49 | Your `meta.toml` file should have a `[meta]` section with the following keys: 50 | 51 | - title: string 52 | - description: string (optional) 53 | - difficulty: "beginner", "intermediate" or "advanced" 54 | - tags: string[] (optional) 55 | - icon: string. You don't need to reference the extension. See full list of icon names [here](https://github.com/singlestore-labs/spaces-notebooks/tree/master/common/images/header-icons) 56 | - destinations: list of strings. For now set it to ["spaces"]. 57 | - minimum_tier: If this notebook can be run on our free shared tier, please use "free-shared". 
Else use "standard" 58 | 59 | Example: 60 | 61 | ```toml 62 | [meta] 63 | title="Atlas & Kai for Mongo Side-by-Side" 64 | description="Compare performance on same code from simple to more complex queries" 65 | difficulty="beginner" 66 | tags=["mongodb", "kai"] 67 | destinations=["spaces"] 68 | icon="database" 69 | minimum_tier="free-shared" 70 | ``` 71 | 72 | 73 | ## Pre-commit checks on the clone of this repo 74 | 75 | The CI pipeline in this repo runs a bunch of validation checks and code reformatting with pre-commit checks. If you don't install those checks in your clone of the repo, the code will likely never pass. To install the pre-commit tool in your clone run the following from your clone directory. This will force the checks before you can push. 76 | 77 | You will need to develop your notebooks using Python 3.11 or higher. By default, Notebooks developed on SingleStore will be using this version of Python. This is required for the pre-commit checks to run: 78 | 79 | ```bash 80 | pip3 install pre-commit==3.7.1 81 | pre-commit install 82 | ``` 83 | 84 | The checks run automatically when you attempt to commit, but you can run them manually as well with the following: 85 | ```bash 86 | pre-commit run --all-files 87 | ``` 88 | -------------------------------------------------------------------------------- /resources/package-samples.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """Package sample notebooks for use in the managed service portal.""" 3 | import argparse 4 | import json 5 | import os 6 | import sys 7 | import tomllib 8 | from zipfile import ZipFile 9 | 10 | 11 | NOTEBOOK_FILE_NAME = 'notebook.ipynb' 12 | 13 | REQUIRED_FILES = [NOTEBOOK_FILE_NAME, 'meta.toml'] 14 | 15 | 16 | def strip_outputs(path: str) -> str: 17 | """Remove outputs from notebook at path.""" 18 | 19 | with open(path, 'r') as infile: 20 | nb = json.loads(infile.read()) 21 | 22 | for cell in nb['cells']: 23 | if 
'metadata' in cell: 24 | cell['metadata']['execution'] = {} 25 | if 'outputs' in cell: 26 | cell['outputs'] = [] 27 | if 'metadata' in nb: 28 | if 'singlestore_connection' in nb['metadata']: 29 | nb['metadata']['singlestore_connection'] = {} 30 | 31 | return json.dumps(nb, indent=2) 32 | 33 | 34 | def get_valid_notebooks(notebooks: str, notebooks_directory: str) -> list[str]: 35 | """Return a list of valid notebooks""" 36 | 37 | notebook_names = [] 38 | 39 | if notebooks == 'sample': 40 | with open(args.toml, 'rb') as f: 41 | meta = tomllib.load(f) 42 | notebook_names = meta['samples']['display'] 43 | elif notebooks == 'all': 44 | # get all 45 | notebook_names = os.listdir(notebooks_directory) 46 | else: 47 | # comma-separated list 48 | notebook_names = list(map(str.strip, notebooks.split(','))) 49 | 50 | valid_notebooks = [] 51 | 52 | for notebook_name in notebook_names: 53 | 54 | for required_file in REQUIRED_FILES: 55 | path = os.path.join( 56 | args.notebooks_directory, 57 | notebook_name, 58 | required_file, 59 | ) 60 | 61 | if not os.path.isfile(path): 62 | print( 63 | f'error: Required file does not exist: {path}', 64 | file=sys.stderr, 65 | ) 66 | sys.exit(1) 67 | 68 | valid_notebooks.append(notebook_name) 69 | 70 | return valid_notebooks 71 | 72 | 73 | def convert_to_destination_path(path: str) -> str: 74 | """Remove 'notebooks' from path""" 75 | parts = path.split('/') 76 | filtered_parts = list(filter(lambda x: x != 'notebooks', parts)) 77 | 78 | return '/'.join(filtered_parts) 79 | 80 | 81 | if __name__ == '__main__': 82 | 83 | parser = argparse.ArgumentParser( 84 | description='package sample notebooks for use in ' 85 | 'the managed service portal', 86 | ) 87 | 88 | parser.add_argument( 89 | 'notebooks_directory', 90 | metavar='notebooks-directory', 91 | help='root `notebooks` directory', 92 | ) 93 | parser.add_argument( 94 | '--notebooks', 95 | help='which notebooks to package', 96 | default='all', 97 | required=True, 98 | ) 99 | parser.add_argument( 
100 | '-t', '--toml', 101 | help='toml file containing configuration', 102 | default='meta.toml', 103 | ) 104 | parser.add_argument( 105 | '-o', '--outfile', 106 | help='name of the output file', 107 | default='sample-notebooks.zip', 108 | ) 109 | parser.add_argument( 110 | '-s', '--strip-outputs', 111 | help='strip the output cells from the notebooks', 112 | default=False, 113 | action=argparse.BooleanOptionalAction, 114 | ) 115 | 116 | args = parser.parse_args() 117 | 118 | valid_notebooks = get_valid_notebooks( 119 | notebooks=args.notebooks, 120 | notebooks_directory=args.notebooks_directory, 121 | ) 122 | 123 | with ZipFile(args.outfile, 'w') as out: 124 | for notebook_name in valid_notebooks: 125 | print(notebook_name) 126 | 127 | notebook_directory_path = os.path.join( 128 | args.notebooks_directory, 129 | notebook_name, 130 | ) 131 | 132 | notebook_path = os.path.join( 133 | notebook_directory_path, 134 | NOTEBOOK_FILE_NAME, 135 | ) 136 | 137 | # write the whole notebook directory 138 | for dirpath, dirs, files in os.walk(notebook_directory_path): 139 | for file in files: 140 | source = os.path.join(dirpath, file) 141 | destination = convert_to_destination_path(source) 142 | 143 | if source == notebook_path and args.strip_outputs: 144 | # write notebook with stripped output 145 | stripped_nodebook = strip_outputs(notebook_path) 146 | out.writestr(destination, stripped_nodebook) 147 | else: 148 | # write file normally 149 | out.write(source, arcname=destination) 150 | -------------------------------------------------------------------------------- /notebooks/integrating-with-pandas/data/iris.csv: -------------------------------------------------------------------------------- 1 | sepal_length,sepal_width,petal_length,petal_width,class 2 | 5.1,3.5,1.4,0.2,Iris-setosa 3 | 4.9,3,1.4,0.2,Iris-setosa 4 | 4.7,3.2,1.3,0.2,Iris-setosa 5 | 4.6,3.1,1.5,0.2,Iris-setosa 6 | 5,3.6,1.4,0.2,Iris-setosa 7 | 5.4,3.9,1.7,0.4,Iris-setosa 8 | 4.6,3.4,1.4,0.3,Iris-setosa 9 | 
5,3.4,1.5,0.0,Iris-setosa 10 | 4.4,2.9,1.4,0.0,Iris-setosa 11 | 4.9,3.1,1.5,0.0,Iris-setosa 12 | 5.4,3.7,1.5,0.0,Iris-setosa 13 | 4.8,3.4,1.6,0.0,Iris-setosa 14 | 4.8,3,1.4,0.1,Iris-setosa 15 | 5.7,3,1.1,0.1,Iris-setosa 16 | 5.8,4,1.2,0.2,Iris-setosa 17 | 5.7,4.4,1.5,0.4,Iris-setosa 18 | 5.4,3.9,1.3,0.4,Iris-setosa 19 | 5.1,3.5,1.4,0.3,Iris-setosa 20 | 5.7,3.8,1.7,0.3,Iris-setossa 21 | 5.1,3.8,1.5,0.3,Iris-setosa 22 | 5.4,3.4,1.7,0.2,Iris-setosa 23 | 5.1,3.7,1.5,0.4,Iris-setosa 24 | 4.6,3.6,1,0.2,Iris-setosa 25 | 5.1,3.3,1.7,0.5,Iris-setosa 26 | 4.8,3.4,1.9,0.2,Iris-setosa 27 | 5,3,1.6,0.2,Iris-setosa 28 | 5,3.4,1.6,0.4,Iris-setosa 29 | 5.2,3.5,1.5,0.2,Iris-setosa 30 | 5.2,3.4,1.4,0.2,Iris-setosa 31 | 4.7,3.2,1.6,0.2,Iris-setosa 32 | 4.8,3.1,1.6,0.2,Iris-setosa 33 | 5.4,3.4,1.5,0.4,Iris-setosa 34 | 5.2,4.1,1.5,0.1,Iris-setosa 35 | 5.5,4.2,1.4,0.2,Iris-setosa 36 | 4.9,3.1,1.5,0.1,Iris-setosa 37 | 5,3.2,1.2,0.2,Iris-setosa 38 | 5.5,3.5,1.3,0.2,Iris-setosa 39 | 4.9,3.1,1.5,0.1,Iris-setosa 40 | 4.4,3,1.3,0.2,Iris-setosa 41 | 5.1,3.4,1.5,0.2,Iris-setosa 42 | 5,3.5,1.3,0.3,Iris-setosa 43 | 4.5,2.3,1.3,0.3,Iris-setosa 44 | 4.4,3.2,1.3,0.2,Iris-setosa 45 | 5,3.5,1.6,0.6,Iris-setosa 46 | 5.1,3.8,1.9,0.4,Iris-setosa 47 | 4.8,3,1.4,0.3,Iris-setosa 48 | 5.1,3.8,1.6,0.2,Iris-setosa 49 | 4.6,3.2,1.4,0.2,Iris-setosa 50 | 5.3,3.7,1.5,0.2,Iris-setosa 51 | 5,3.3,1.4,0.2,Iris-setosa 52 | 7,3.2,4.7,1.4,Iris-versicolor 53 | 6.4,3.2,4.5,1.5,Iris-versicolor 54 | 6.9,3.1,4.9,1.5,Iris-versicolor 55 | 5.5,2.3,4,1.3,Iris-versicolor 56 | 6.5,2.8,4.6,1.5,Iris-versicolor 57 | 5.7,2.8,4.5,1.3,Iris-versicolor 58 | 6.3,3.3,4.7,1.6,Iris-versicolor 59 | 4.9,2.4,3.3,1,Iris-versicolor 60 | 6.6,2.9,4.6,1.3,Iris-versicolor 61 | 5.2,2.7,3.9,1.4,Iris-versicolor 62 | 5,2,3.5,1,Iris-versicolor 63 | 5.9,3,4.2,1.5,Iris-versicolor 64 | 6,2.2,4,1,Iris-versicolor 65 | 6.1,2.9,4.7,1.4,Iris-versicolor 66 | 5.6,2.9,3.6,1.3,Iris-versicolor 67 | 6.7,3.1,4.4,1.4,Iris-versicolor 68 | 5.6,3,4.5,1.5,Iris-versicolor 69 | 
5.8,2.7,4.1,1,Iris-versicolor 70 | 6.2,2.2,4.5,1.5,Iris-versicolor 71 | 5.6,2.5,3.9,1.1,Iris-versicolor 72 | 5.9,3.2,4.8,1.8,Iris-versicolor 73 | 6.1,2.8,4,1.3,Iris-versicolor 74 | 6.3,2.5,4.9,1.5,Iris-versicolor 75 | 6.1,2.8,4.7,1.2,Iris-versicolor 76 | 6.4,2.9,4.3,1.3,Iris-versicolor 77 | 6.6,3,4.4,1.4,Iris-versicolor 78 | 6.8,2.8,4.8,1.4,Iris-versicolor 79 | 0.067,3,5,1.7,Iris-versicolor 80 | 0.06,2.9,4.5,1.5,Iris-versicolor 81 | 0.057,2.6,3.5,1,Iris-versicolor 82 | 0.055,2.4,3.8,1.1,Iris-versicolor 83 | 0.055,2.4,3.7,1,Iris-versicolor 84 | 5.8,2.7,3.9,1.2,Iris-versicolor 85 | 6,2.8,5.1,1.6,Iris-versicolor 86 | 5.4,3,4.5,1.5,Iris-versicolor 87 | 6,3.4,4.5,1.6,Iris-versicolor 88 | 6.7,3.1,4.7,1.5,Iris-versicolor 89 | 6.3,2.3,4.4,1.3,Iris-versicolor 90 | 5.6,3,4.1,1.3,Iris-versicolor 91 | 5.5,2.5,4,1.3,Iris-versicolor 92 | 5.5,2.6,4.4,1.2,Iris-versicolor 93 | 6.1,3,4.6,1.4,Iris-versicolor 94 | 5.8,2.6,4,1.2,Iris-versicolor 95 | 5,2.3,3.3,1,Iris-versicolor 96 | 5.6,2.7,4.2,1.3,Iris-versicolor 97 | 5.7,3,4.2,1.2,versicolor 98 | 5.7,2.9,4.2,1.3,versicolor 99 | 6.2,2.9,4.3,1.3,versicolor 100 | 5.1,2.5,3,1.1,versicolor 101 | 5.7,2.8,4.1,1.3,versicolor 102 | 6.3,3.3,6,2.5,Iris-virginica 103 | 5.8,2.7,5.1,1.9,Iris-virginica 104 | 7.1,3,5.9,2.1,Iris-virginica 105 | 6.3,2.9,5.6,1.8,Iris-virginica 106 | 6.5,3,5.8,2.2,Iris-virginica 107 | 7.6,3,6.6,2.1,Iris-virginica 108 | 4.9,2.5,4.5,1.7,Iris-virginica 109 | 7.3,2.9,6.3,1.8,Iris-virginica 110 | 6.7,2.5,5.8,1.8,Iris-virginica 111 | 7.2,3.6,6.1,2.5,Iris-virginica 112 | 6.5,3.2,5.1,2,Iris-virginica 113 | 6.4,2.7,5.3,1.9,Iris-virginica 114 | 6.8,3,5.5,2.1,Iris-virginica 115 | 5.7,2.5,5,2,Iris-virginica 116 | 5.8,2.8,5.1,2.4,Iris-virginica 117 | 6.4,3.2,5.3,2.3,Iris-virginica 118 | 6.5,3,5.5,1.8,Iris-virginica 119 | 7.7,3.8,6.7,2.2,Iris-virginica 120 | 7.7,2.6,6.9,2.3,Iris-virginica 121 | 6,2.2,5,1.5,Iris-virginica 122 | 6.9,3.2,5.7,2.3,Iris-virginica 123 | 5.6,2.8,4.9,2,Iris-virginica 124 | 5.6,2.8,6.7,2,Iris-virginica 125 | 
6.3,2.7,4.9,1.8,Iris-virginica 126 | 6.7,3.3,5.7,2.1,Iris-virginica 127 | 7.2,3.2,6,1.8,Iris-virginica 128 | 6.2,2.8,4.8,1.8,Iris-virginica 129 | 6.1,3,4.9,1.8,Iris-virginica 130 | 6.4,2.8,5.6,2.1,Iris-virginica 131 | 7.2,3,5.8,1.6,Iris-virginica 132 | 7.4,2.8,6.1,1.9,Iris-virginica 133 | 7.9,3.8,6.4,2,Iris-virginica 134 | 6.4,2.8,5.6,2.2,Iris-virginica 135 | 6.3,2.8,5.1,1.5,Iris-virginica 136 | 6.1,2.6,5.6,1.4,Iris-virginica 137 | 7.7,3,6.1,2.3,Iris-virginica 138 | 6.3,3.4,5.6,2.4,Iris-virginica 139 | 6.4,3.1,5.5,1.8,Iris-virginica 140 | 6,3,4.8,1.8,Iris-virginica 141 | 6.9,3.1,5.4,2.1,Iris-virginica 142 | 6.7,3.1,5.6,2.4,Iris-virginica 143 | 6.9,3.1,5.1,2.3,Iris-virginica 144 | 5.8,2.7,5.1,1.9,Iris-virginica 145 | 6.8,3.2,5.9,2.3,Iris-virginica 146 | 6.7,3.3,5.7,2.5,Iris-virginica 147 | 6.7,3,5.2,2.3,Iris-virginica 148 | 6.3,2.5,5,2.3,Iris-virginica 149 | 6.5,3,5.2,2,Iris-virginica 150 | 6.2,3.4,5.4,2.3,Iris-virginica 151 | 5.9,3,5.1,1.8,Iris-virginica 152 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 
14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | - Demonstrating empathy and kindness toward other people 21 | - Being respectful of differing opinions, viewpoints, and experiences 22 | - Giving and gracefully accepting constructive feedback 23 | - Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | - Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | - The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | - Trolling, insulting or derogatory comments, and personal or political attacks 33 | - Public or private harassment 34 | - Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | - Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 
55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | bhayes@singlestore.com. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 
99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 
129 | -------------------------------------------------------------------------------- /common/images/singlestore-full-logo-grey.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /notebooks/performance-troubleshooting/assets/templates/Result-1.template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | Plan Warnings - Report 8 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 21 | 25 | 26 |
18 | 20 | 22 | 24 |
27 | 28 | 29 |

Plan Warnings - Analysis

30 | 31 |
32 |

Background

33 |
34 |

35 | The plancache in SingleStore is a mechanism that stores compiled and executed query plans along with 36 | cumulative execution statistics for each plan. 37 |

38 |

When a query is executed for the first time, the following actions are performed

39 |
    40 |
  • code generation
  • 41 |
  • optimization
  • 42 |
  • translation to machine code
  • 43 |
44 | 45 |

The query is then stored in the plancache for later reuse. During this process, SingleStore generates warnings and suggestions; it is beneficial to address them to improve performance

46 | 47 |

Ignoring plan cache warnings could lead to suboptimal query performance, as these warnings often indicate opportunities to optimize queries or configurations that can improve the overall efficiency of the database operations.

48 | 49 |

Using the MV_QUERY_ACTIVITIES_EXTENDED_CUMULATIVE view as a reference, you can monitor various performance metrics, including cpu_time_ms, memory_bs, and plan_warnings, to analyze and identify performance bottlenecks in your queries

50 | 51 |

Utilizing plan cache warnings in SingleStore offers several benefits

52 |
    53 |
  1. Performance Optimization: Addressing plan cache warnings ensures query plans are efficient, improving performance and reducing resource usage.
  2. 54 | 55 |
  3. Insight into Query Plan Stability: Identifying parameter-dependent query plans helps stabilize performance.
  4. 56 | 57 |
  5. Resource Management: Monitoring warnings helps optimize resource usage and maintain system stability.
  6. 58 | 59 |
  7. Proactive Problem-Solving: Plan cache warnings allow for proactive issue identification and resolution.
  8. 60 | 61 |
  9. Query Execution Insights: They provide valuable insights for debugging and understanding database engine behavior.
  10. 62 |
63 |

64 | Overall, plan cache warnings are crucial for maintaining and enhancing SingleStore database performance, stability, and efficiency. 65 |

66 |
67 |
68 | 69 |
70 |

Actions

71 |
72 |
    73 |
  1. 74 | Review Warnings: Examine warnings in the plan_warnings column of the plancache or 75 | through the 76 | EXPLAIN command
  2. 78 |
  3. Check Data Types: Validate whether compared data types are expected to 79 | differ; adjust schema 80 | or queries accordingly
  4. 81 |
  5. Data Types: If necessary, adjust data types of mismatched columns for 82 | compatibility and 83 | effective index usage
  6. 84 |
  7. Explicit Type Conversion: If changing data types is impractical, add 85 | explicit type conversion 86 | operations using cast operators or conversion functions
  8. 87 |
88 |
89 |
90 | 91 |
92 |

Results

93 |
94 | rstable 95 |
96 | 97 |
Refer File: datatype_mismatch_comparision_report.xlsx for detailed analysis
98 |
99 | 100 |
101 |

References

102 | 109 | 110 |
111 | 112 |
113 | 114 |
115 | 116 | 117 | 120 | 121 | 122 | 123 | 135 | 136 | 137 | 138 | 139 | -------------------------------------------------------------------------------- /notebooks/kebab-case/notebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "id": "9c00b945", 5 | "cell_type": "markdown", 6 | "metadata": {}, 7 | "source": [ 8 | "
\n", 9 | "
\n", 10 | " \n", 11 | "
\n", 12 | "
\n", 13 | "
SingleStore Notebooks
\n", 14 | "

Automatically warm up your disk after resuming workspace

\n", 15 | "
\n", 16 | "
" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## Step 0. Import libraries & create connection to database" 24 | ], 25 | "id": "bd2b3254" 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 1, 30 | "metadata": {}, 31 | "outputs": [], 32 | "source": [ 33 | "import pandas as pd\n", 34 | "import singlestoredb as s2\n", 35 | "s2_conn = s2.connect()\n", 36 | "s2_cur = s2_conn.cursor()" 37 | ], 38 | "id": "27973c95" 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "## Step 1. Specify which database you want to cache" 45 | ], 46 | "id": "46be6948" 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 2, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "database_name = input('Enter database name:')" 55 | ], 56 | "id": "9590c72f" 57 | }, 58 | { 59 | "cell_type": "markdown", 60 | "metadata": {}, 61 | "source": [ 62 | "## Step 2. Get a list of the columnstore table names in your database" 63 | ], 64 | "id": "9d80057f" 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 3, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "query = \"\"\"SELECT table_name FROM information_schema.tables WHERE table_schema = '{}' AND table_type = 'BASE TABLE' AND storage_type = 'COLUMNSTORE';\"\"\".format(database_name)\n", 73 | "result = s2_cur.execute(query)\n", 74 | "result_df = pd.DataFrame(list(s2_cur))\n", 75 | "list_of_tables = result_df[[0]].values" 76 | ], 77 | "id": "bd63ec02" 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "metadata": {}, 82 | "source": [ 83 | "## Step 3. 
Cache columnar files" 84 | ], 85 | "id": "c6f080b3" 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 4, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "# get queries to warm up columm files\n", 94 | "column_queries_df = pd.DataFrame()\n", 95 | "\n", 96 | "for table_name in list_of_tables:\n", 97 | " query = \"\"\" WITH t1 AS (SELECT c.column_name, c.ordinal_position, MAX(c.ordinal_position) OVER (ORDER BY c.ordinal_position DESC) AS last_row FROM information_schema.columns c JOIN information_schema.tables t ON c.table_catalog = t.table_catalog AND c.table_schema = t.table_schema AND c.table_name = t.table_name WHERE c.table_schema = '{0}' AND c.table_name = '{1}') SELECT CASE WHEN ordinal_position = 1 AND ordinal_position = last_row THEN CONCAT('SELECT ', 'AVG(LENGTH(`',column_name,'`)) FROM ', '{1}') WHEN ordinal_position = 1 and ordinal_position != last_row THEN CONCAT('SELECT ', 'AVG(LENGTH(`',column_name,'`)),') WHEN ordinal_position != last_row THEN CONCAT('AVG(LENGTH(`',column_name,'`)),') ELSE CONCAT('AVG(LENGTH(`',column_name,'`)) FROM ', '{1}') END AS query_text FROM t1 ORDER BY ordinal_position; \"\"\".format(database_name, table_name[0])\n", 98 | " result = s2_cur.execute(query)\n", 99 | " result_df = pd.DataFrame(list(s2_cur))\n", 100 | " result_df['table_name'] = table_name[0]\n", 101 | " column_queries_df = pd.concat([column_queries_df, result_df], axis=0)\n", 102 | "\n", 103 | "column_queries_df.rename(columns = {0:'query_text'}, inplace = True)\n", 104 | "final_column_df = column_queries_df.groupby('table_name')['query_text'].apply(' '.join).reset_index()\n", 105 | "final_column_df['query_text'] = final_column_df['query_text'].astype(str) + ';'\n", 106 | "\n", 107 | "# run column file warm up queries\n", 108 | "for query in final_column_df[['query_text']].values:\n", 109 | " s2_cur.execute(\"\"\" {} \"\"\".format(query[0]))" 110 | ], 111 | "id": "af25d90b" 112 | }, 113 | { 114 | "cell_type": "markdown", 115 | 
"metadata": {}, 116 | "source": [ 117 | "## Step 4. Cache index files" 118 | ], 119 | "id": "3295c1ab" 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": 5, 124 | "metadata": {}, 125 | "outputs": [], 126 | "source": [ 127 | "# get queries to warm up index files\n", 128 | "index_queries_df = pd.DataFrame()\n", 129 | "\n", 130 | "for table_name in list_of_tables:\n", 131 | " query = \"\"\" SELECT DISTINCT CONCAT(\"OPTIMIZE TABLE \", table_name, \" WARM BLOB CACHE FOR INDEX \", \"`\", index_name, \"`\", \";\") FROM information_schema.statistics WHERE TABLE_SCHEMA = '{}' AND index_type = 'COLUMNSTORE HASH' AND table_name = '{}'; \"\"\".format(database_name, table_name[0])\n", 132 | " result = s2_cur.execute(query)\n", 133 | " result_df = pd.DataFrame(list(s2_cur))\n", 134 | " index_queries_df = pd.concat([index_queries_df, result_df], axis=0)\n", 135 | "\n", 136 | "# run index file warm up queries\n", 137 | "for query in index_queries_df.values:\n", 138 | " s2_cur.execute(\"\"\" {} \"\"\".format(query[0]))" 139 | ], 140 | "id": "1bf9836c" 141 | }, 142 | { 143 | "id": "3921cac6", 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "
\n", 148 | "
" 149 | ] 150 | } 151 | ], 152 | "metadata": { 153 | "jupyterlab": { 154 | "notebooks": { 155 | "version_major": 6, 156 | "version_minor": 4 157 | } 158 | }, 159 | "kernelspec": { 160 | "display_name": "Python 3 (ipykernel)", 161 | "language": "python", 162 | "name": "python3" 163 | }, 164 | "language_info": { 165 | "codemirror_mode": { 166 | "name": "ipython", 167 | "version": 3 168 | }, 169 | "file_extension": ".py", 170 | "mimetype": "text/x-python", 171 | "name": "python", 172 | "nbconvert_exporter": "python", 173 | "pygments_lexer": "ipython3", 174 | "version": "3.11.4" 175 | } 176 | }, 177 | "nbformat": 4, 178 | "nbformat_minor": 5 179 | } 180 | -------------------------------------------------------------------------------- /notebooks/performance-troubleshooting/assets/templates/Result-5.template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Duplicate Columns 7 | 13 | 17 | 18 | 19 | 20 | 29 | 30 | 44 | 58 | 59 |
39 | 43 | 53 | 57 |
60 | 61 |

62 | Query to find duplicate columns 63 |

64 | 65 |
66 |

Background

67 |
68 |

69 | Duplicate columns in a database schema can lead to several challenges and inefficiencies, impacting both performance and data integrity 70 |

71 | 72 |

The downsides of duplicate columns include:

73 |
    74 |
  • 75 | Increased Storage Usage: Duplicate columns consume additional storage space unnecessarily. This can be particularly impactful in large-scale databases where storage efficiency is crucial. 76 |
  • 77 |
  • 78 | Data Redundancy and Inconsistency Redundancy can lead to inconsistency if the data in these columns is not kept synchronized, potentially leading to anomalies and errors in data retrieval and analysis. 79 |
  • 80 |
  • 81 | Complicated Queries: Having duplicate columns can complicate query writing and maintenance. Developers might need to remember which column to use for specific queries or might accidentally use the wrong one, leading to incorrect query results. 82 |
  • 83 |
  • 84 | Performance Overhead Duplicate columns can cause performance overheads, especially for write operations like INSERT, UPDATE, and DELETE. These operations might take longer to execute as they have to deal with more data than necessary. It can also impact the performance of indexing and other optimization features. 85 |
  • 86 |
  • 87 | Impact on Backup and Recovery Duplicate columns can increase the size of backups, making the backup and recovery process take longer. This can be particularly impactful in disaster recovery scenarios where time to restore operations is critical. 88 |
  • 89 |
90 | 91 | 92 |
93 |
94 | 95 |
96 |

Actions

97 |
98 |
    99 |
  1. 100 | Schema Analysis Analyze your database schema to identify columns that store similar data across different tables or within the same table. 101 |
  2. 102 |
  3. 103 | Data Normalization Apply normalization principles to redesign the schema. Normalization involves organizing your database to reduce redundancy and improve data integrity 104 |
  4. 105 |
  5. 106 | Migrate Data Once you have a new schema design, migrate the data from your old structure to the new one. 107 |
  6. 108 |
  7. 109 | Update Applications If you have applications or services that interact with the database, you'll need to update them to work with the new schema. 110 |
  8. 111 |
  9. 112 | Testing Thoroughly test the new schema with your applications to ensure that everything functions correctly and performance meets your requirements 113 | key. 114 |
  10. 115 |
  11. 116 | Performance Tuning After the schema changes are implemented, monitor the database's performance and adjust indexes, query designs, and configurations as needed to optimize performance. 117 |
  12. 118 |
  13. 119 | Documentation and Maintenance Document the changes made to the database schema, including the rationale for changes and any implications for database maintenance or application development. Keep this documentation updated as further changes are made. 120 |
  14. 121 |
122 | 123 |

124 | In summary, while specific behaviors and impacts might vary depending on the database system's implementation and the specific schema design, it's generally advisable to avoid duplicate columns in a database schema to maintain efficiency, performance, and data integrity. Always consider normalization 125 | 126 |

127 |
128 |
129 | 130 |
131 |
rstable
132 |
133 | 134 | 145 | 146 |
147 | 155 |
156 | 157 | 162 | 167 | 171 | 175 | 176 | 188 | 189 | 190 | -------------------------------------------------------------------------------- /notebooks/real-time-recommendation-engine/singlestore_bundle.pem: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIF3jCCA8agAwIBAgIQAf1tMPyjylGoG7xkDjUDLTANBgkqhkiG9w0BAQwFADCB 3 | iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl 4 | cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV 5 | BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTAw 6 | MjAxMDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCBiDELMAkGA1UEBhMCVVMxEzARBgNV 7 | BAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0plcnNleSBDaXR5MR4wHAYDVQQKExVU 8 | aGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNVBAMTJVVTRVJUcnVzdCBSU0EgQ2Vy 9 | dGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIK 10 | AoICAQCAEmUXNg7D2wiz0KxXDXbtzSfTTK1Qg2HiqiBNCS1kCdzOiZ/MPans9s/B 11 | 3PHTsdZ7NygRK0faOca8Ohm0X6a9fZ2jY0K2dvKpOyuR+OJv0OwWIJAJPuLodMkY 12 | tJHUYmTbf6MG8YgYapAiPLz+E/CHFHv25B+O1ORRxhFnRghRy4YUVD+8M/5+bJz/ 13 | Fp0YvVGONaanZshyZ9shZrHUm3gDwFA66Mzw3LyeTP6vBZY1H1dat//O+T23LLb2 14 | VN3I5xI6Ta5MirdcmrS3ID3KfyI0rn47aGYBROcBTkZTmzNg95S+UzeQc0PzMsNT 15 | 79uq/nROacdrjGCT3sTHDN/hMq7MkztReJVni+49Vv4M0GkPGw/zJSZrM233bkf6 16 | c0Plfg6lZrEpfDKEY1WJxA3Bk1QwGROs0303p+tdOmw1XNtB1xLaqUkL39iAigmT 17 | Yo61Zs8liM2EuLE/pDkP2QKe6xJMlXzzawWpXhaDzLhn4ugTncxbgtNMs+1b/97l 18 | c6wjOy0AvzVVdAlJ2ElYGn+SNuZRkg7zJn0cTRe8yexDJtC/QV9AqURE9JnnV4ee 19 | UB9XVKg+/XRjL7FQZQnmWEIuQxpMtPAlR1n6BB6T1CZGSlCBst6+eLf8ZxXhyVeE 20 | Hg9j1uliutZfVS7qXMYoCAQlObgOK6nyTJccBz8NUvXt7y+CDwIDAQABo0IwQDAd 21 | BgNVHQ4EFgQUU3m/WqorSs9UgOHYm8Cd8rIDZsswDgYDVR0PAQH/BAQDAgEGMA8G 22 | A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEMBQADggIBAFzUfA3P9wF9QZllDHPF 23 | Up/L+M+ZBn8b2kMVn54CVVeWFPFSPCeHlCjtHzoBN6J2/FNQwISbxmtOuowhT6KO 24 | VWKR82kV2LyI48SqC/3vqOlLVSoGIG1VeCkZ7l8wXEskEVX/JJpuXior7gtNn3/3 25 | ATiUFJVDBwn7YKnuHKsSjKCaXqeYalltiz8I+8jRRa8YFWSQEg9zKC7F4iRO/Fjs 26 | 
8PRF/iKz6y+O0tlFYQXBl2+odnKPi4w2r78NBc5xjeambx9spnFixdjQg3IM8WcR 27 | iQycE0xyNN+81XHfqnHd4blsjDwSXWXavVcStkNr/+XeTWYRUc+ZruwXtuhxkYze 28 | Sf7dNXGiFSeUHM9h4ya7b6NnJSFd5t0dCy5oGzuCr+yDZ4XUmFF0sbmZgIn/f3gZ 29 | XHlKYC6SQK5MNyosycdiyA5d9zZbyuAlJQG03RoHnHcAP9Dc1ew91Pq7P8yF1m9/ 30 | qS3fuQL39ZeatTXaw2ewh0qpKJ4jjv9cJ2vhsE/zB+4ALtRZh8tSQZXq9EfX7mRB 31 | VXyNWQKV3WKdwrnuWih0hKWbt5DHDAff9Yk2dDLWKMGwsAvgnEzDHNb842m1R0aB 32 | L6KCq9NjRHDEjf8tM7qtj3u1cIiuPhnPQCjY/MiQu12ZIvVS5ljFH4gxQ+6IHdfG 33 | jjxDah2nGN59PRbxYvnKkKj9 34 | -----END CERTIFICATE----- 35 | -----BEGIN CERTIFICATE----- 36 | MIIGEzCCA/ugAwIBAgIQfVtRJrR2uhHbdBYLvFMNpzANBgkqhkiG9w0BAQwFADCB 37 | iDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCk5ldyBKZXJzZXkxFDASBgNVBAcTC0pl 38 | cnNleSBDaXR5MR4wHAYDVQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxLjAsBgNV 39 | BAMTJVVTRVJUcnVzdCBSU0EgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMTgx 40 | MTAyMDAwMDAwWhcNMzAxMjMxMjM1OTU5WjCBjzELMAkGA1UEBhMCR0IxGzAZBgNV 41 | BAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEYMBYGA1UE 42 | ChMPU2VjdGlnbyBMaW1pdGVkMTcwNQYDVQQDEy5TZWN0aWdvIFJTQSBEb21haW4g 43 | VmFsaWRhdGlvbiBTZWN1cmUgU2VydmVyIENBMIIBIjANBgkqhkiG9w0BAQEFAAOC 44 | AQ8AMIIBCgKCAQEA1nMz1tc8INAA0hdFuNY+B6I/x0HuMjDJsGz99J/LEpgPLT+N 45 | TQEMgg8Xf2Iu6bhIefsWg06t1zIlk7cHv7lQP6lMw0Aq6Tn/2YHKHxYyQdqAJrkj 46 | eocgHuP/IJo8lURvh3UGkEC0MpMWCRAIIz7S3YcPb11RFGoKacVPAXJpz9OTTG0E 47 | oKMbgn6xmrntxZ7FN3ifmgg0+1YuWMQJDgZkW7w33PGfKGioVrCSo1yfu4iYCBsk 48 | Haswha6vsC6eep3BwEIc4gLw6uBK0u+QDrTBQBbwb4VCSmT3pDCg/r8uoydajotY 49 | uK3DGReEY+1vVv2Dy2A0xHS+5p3b4eTlygxfFQIDAQABo4IBbjCCAWowHwYDVR0j 50 | BBgwFoAUU3m/WqorSs9UgOHYm8Cd8rIDZsswHQYDVR0OBBYEFI2MXsRUrYrhd+mb 51 | +ZsF4bgBjWHhMA4GA1UdDwEB/wQEAwIBhjASBgNVHRMBAf8ECDAGAQH/AgEAMB0G 52 | A1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjAbBgNVHSAEFDASMAYGBFUdIAAw 53 | CAYGZ4EMAQIBMFAGA1UdHwRJMEcwRaBDoEGGP2h0dHA6Ly9jcmwudXNlcnRydXN0 54 | LmNvbS9VU0VSVHJ1c3RSU0FDZXJ0aWZpY2F0aW9uQXV0aG9yaXR5LmNybDB2Bggr 55 | BgEFBQcBAQRqMGgwPwYIKwYBBQUHMAKGM2h0dHA6Ly9jcnQudXNlcnRydXN0LmNv 56 | 
bS9VU0VSVHJ1c3RSU0FBZGRUcnVzdENBLmNydDAlBggrBgEFBQcwAYYZaHR0cDov 57 | L29jc3AudXNlcnRydXN0LmNvbTANBgkqhkiG9w0BAQwFAAOCAgEAMr9hvQ5Iw0/H 58 | ukdN+Jx4GQHcEx2Ab/zDcLRSmjEzmldS+zGea6TvVKqJjUAXaPgREHzSyrHxVYbH 59 | 7rM2kYb2OVG/Rr8PoLq0935JxCo2F57kaDl6r5ROVm+yezu/Coa9zcV3HAO4OLGi 60 | H19+24rcRki2aArPsrW04jTkZ6k4Zgle0rj8nSg6F0AnwnJOKf0hPHzPE/uWLMUx 61 | RP0T7dWbqWlod3zu4f+k+TY4CFM5ooQ0nBnzvg6s1SQ36yOoeNDT5++SR2RiOSLv 62 | xvcRviKFxmZEJCaOEDKNyJOuB56DPi/Z+fVGjmO+wea03KbNIaiGCpXZLoUmGv38 63 | sbZXQm2V0TP2ORQGgkE49Y9Y3IBbpNV9lXj9p5v//cWoaasm56ekBYdbqbe4oyAL 64 | l6lFhd2zi+WJN44pDfwGF/Y4QA5C5BIG+3vzxhFoYt/jmPQT2BVPi7Fp2RBgvGQq 65 | 6jG35LWjOhSbJuMLe/0CjraZwTiXWTb2qHSihrZe68Zk6s+go/lunrotEbaGmAhY 66 | LcmsJWTyXnW0OMGuf1pGg+pRyrbxmRE1a6Vqe8YAsOf4vmSyrcjC8azjUeqkk+B5 67 | yOGBQMkKW+ESPMFgKuOXwIlCypTPRpgSabuY0MLTDXJLR27lk8QyKGOHQ+SwMj4K 68 | 00u/I5sUKUErmgQfky3xxzlIPK1aEn8= 69 | -----END CERTIFICATE----- 70 | -----BEGIN CERTIFICATE----- 71 | MIIFazCCA1OgAwIBAgIRAIIQz7DSQONZRGPgu2OCiwAwDQYJKoZIhvcNAQELBQAw 72 | TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh 73 | cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMTUwNjA0MTEwNDM4 74 | WhcNMzUwNjA0MTEwNDM4WjBPMQswCQYDVQQGEwJVUzEpMCcGA1UEChMgSW50ZXJu 75 | ZXQgU2VjdXJpdHkgUmVzZWFyY2ggR3JvdXAxFTATBgNVBAMTDElTUkcgUm9vdCBY 76 | MTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK3oJHP0FDfzm54rVygc 77 | h77ct984kIxuPOZXoHj3dcKi/vVqbvYATyjb3miGbESTtrFj/RQSa78f0uoxmyF+ 78 | 0TM8ukj13Xnfs7j/EvEhmkvBioZxaUpmZmyPfjxwv60pIgbz5MDmgK7iS4+3mX6U 79 | A5/TR5d8mUgjU+g4rk8Kb4Mu0UlXjIB0ttov0DiNewNwIRt18jA8+o+u3dpjq+sW 80 | T8KOEUt+zwvo/7V3LvSye0rgTBIlDHCNAymg4VMk7BPZ7hm/ELNKjD+Jo2FR3qyH 81 | B5T0Y3HsLuJvW5iB4YlcNHlsdu87kGJ55tukmi8mxdAQ4Q7e2RCOFvu396j3x+UC 82 | B5iPNgiV5+I3lg02dZ77DnKxHZu8A/lJBdiB3QW0KtZB6awBdpUKD9jf1b0SHzUv 83 | KBds0pjBqAlkd25HN7rOrFleaJ1/ctaJxQZBKT5ZPt0m9STJEadao0xAH0ahmbWn 84 | OlFuhjuefXKnEgV4We0+UXgVCwOPjdAvBbI+e0ocS3MFEvzG6uBQE3xDk3SzynTn 85 | jh8BCNAw1FtxNrQHusEwMFxIt4I7mKZ9YIqioymCzLq9gwQbooMDQaHWBfEbwrbw 86 | 
qHyGO0aoSCqI3Haadr8faqU9GY/rOPNk3sgrDQoo//fb4hVC1CLQJ13hef4Y53CI 87 | rU7m2Ys6xt0nUW7/vGT1M0NPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNV 88 | HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR5tFnme7bl5AFzgAiIyBpY9umbbjANBgkq 89 | hkiG9w0BAQsFAAOCAgEAVR9YqbyyqFDQDLHYGmkgJykIrGF1XIpu+ILlaS/V9lZL 90 | ubhzEFnTIZd+50xx+7LSYK05qAvqFyFWhfFQDlnrzuBZ6brJFe+GnY+EgPbk6ZGQ 91 | 3BebYhtF8GaV0nxvwuo77x/Py9auJ/GpsMiu/X1+mvoiBOv/2X/qkSsisRcOj/KK 92 | NFtY2PwByVS5uCbMiogziUwthDyC3+6WVwW6LLv3xLfHTjuCvjHIInNzktHCgKQ5 93 | ORAzI4JMPJ+GslWYHb4phowim57iaztXOoJwTdwJx4nLCgdNbOhdjsnvzqvHu7Ur 94 | TkXWStAmzOVyyghqpZXjFaH3pO3JLF+l+/+sKAIuvtd7u+Nxe5AW0wdeRlN8NwdC 95 | jNPElpzVmbUq4JUagEiuTDkHzsxHpFKVK7q4+63SM1N95R1NbdWhscdCb+ZAJzVc 96 | oyi3B43njTOQ5yOf+1CceWxG1bQVs5ZufpsMljq4Ui0/1lvh+wjChP4kqKOJ2qxq 97 | 4RgqsahDYVvTH9w7jXbyLeiNdd8XM2w9U/t7y0Ff/9yi0GE44Za4rF2LN9d11TPA 98 | mRGunUHBcnWEvgJBQl9nJEiU0Zsnvgc/ubhPgXRR4Xq37Z0j4r7g1SgEEzwxA57d 99 | emyPxgcYxn/eR44/KJ4EBs+lVDR3veyJm+kXQ99b21/+jh5Xos1AnX5iItreGCc= 100 | -----END CERTIFICATE----- 101 | -----BEGIN CERTIFICATE----- 102 | MIIFFjCCAv6gAwIBAgIRAJErCErPDBinU/bWLiWnX1owDQYJKoZIhvcNAQELBQAw 103 | TzELMAkGA1UEBhMCVVMxKTAnBgNVBAoTIEludGVybmV0IFNlY3VyaXR5IFJlc2Vh 104 | cmNoIEdyb3VwMRUwEwYDVQQDEwxJU1JHIFJvb3QgWDEwHhcNMjAwOTA0MDAwMDAw 105 | WhcNMjUwOTE1MTYwMDAwWjAyMQswCQYDVQQGEwJVUzEWMBQGA1UEChMNTGV0J3Mg 106 | RW5jcnlwdDELMAkGA1UEAxMCUjMwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEK 107 | AoIBAQC7AhUozPaglNMPEuyNVZLD+ILxmaZ6QoinXSaqtSu5xUyxr45r+XXIo9cP 108 | R5QUVTVXjJ6oojkZ9YI8QqlObvU7wy7bjcCwXPNZOOftz2nwWgsbvsCUJCWH+jdx 109 | sxPnHKzhm+/b5DtFUkWWqcFTzjTIUu61ru2P3mBw4qVUq7ZtDpelQDRrK9O8Zutm 110 | NHz6a4uPVymZ+DAXXbpyb/uBxa3Shlg9F8fnCbvxK/eG3MHacV3URuPMrSXBiLxg 111 | Z3Vms/EY96Jc5lP/Ooi2R6X/ExjqmAl3P51T+c8B5fWmcBcUr2Ok/5mzk53cU6cG 112 | /kiFHaFpriV1uxPMUgP17VGhi9sVAgMBAAGjggEIMIIBBDAOBgNVHQ8BAf8EBAMC 113 | AYYwHQYDVR0lBBYwFAYIKwYBBQUHAwIGCCsGAQUFBwMBMBIGA1UdEwEB/wQIMAYB 114 | Af8CAQAwHQYDVR0OBBYEFBQusxe3WFbLrlAJQOYfr52LFMLGMB8GA1UdIwQYMBaA 115 | 
FHm0WeZ7tuXkAXOACIjIGlj26ZtuMDIGCCsGAQUFBwEBBCYwJDAiBggrBgEFBQcw 116 | AoYWaHR0cDovL3gxLmkubGVuY3Iub3JnLzAnBgNVHR8EIDAeMBygGqAYhhZodHRw 117 | Oi8veDEuYy5sZW5jci5vcmcvMCIGA1UdIAQbMBkwCAYGZ4EMAQIBMA0GCysGAQQB 118 | gt8TAQEBMA0GCSqGSIb3DQEBCwUAA4ICAQCFyk5HPqP3hUSFvNVneLKYY611TR6W 119 | PTNlclQtgaDqw+34IL9fzLdwALduO/ZelN7kIJ+m74uyA+eitRY8kc607TkC53wl 120 | ikfmZW4/RvTZ8M6UK+5UzhK8jCdLuMGYL6KvzXGRSgi3yLgjewQtCPkIVz6D2QQz 121 | CkcheAmCJ8MqyJu5zlzyZMjAvnnAT45tRAxekrsu94sQ4egdRCnbWSDtY7kh+BIm 122 | lJNXoB1lBMEKIq4QDUOXoRgffuDghje1WrG9ML+Hbisq/yFOGwXD9RiX8F6sw6W4 123 | avAuvDszue5L3sz85K+EC4Y/wFVDNvZo4TYXao6Z0f+lQKc0t8DQYzk1OXVu8rp2 124 | yJMC6alLbBfODALZvYH7n7do1AZls4I9d1P4jnkDrQoxB3UqQ9hVl3LEKQ73xF1O 125 | yK5GhDDX8oVfGKF5u+decIsH4YaTw7mP3GFxJSqv3+0lUFJoi5Lc5da149p90Ids 126 | hCExroL1+7mryIkXPeFM5TgO9r0rvZaBFOvV2z0gp35Z0+L4WPlbuEjN/lxPFin+ 127 | HlUjr8gRsI3qfJOQFy/9rKIJR0Y/8Omwt/8oTWgy1mdeHmmjk7j1nYsvC9JSQ6Zv 128 | MldlTTKB3zhThV1+XWYp6rjd5JW1zbVWEkLNxE7GJThEUG3szgBVGP7pSWTUTsqX 129 | nLRbwHOoq7hHwg== 130 | -----END CERTIFICATE----- 131 | -------------------------------------------------------------------------------- /notebooks/representing-unstructured-data-as-vectors/notebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "id": "0ec251f1", 5 | "cell_type": "markdown", 6 | "metadata": {}, 7 | "source": [ 8 | "
\n", 9 | "
\n", 10 | " \n", 11 | "
\n", 12 | "
\n", 13 | "
SingleStore Notebooks
\n", 14 | "

Representing Unstructured Data as Vectors

\n", 15 | "
\n", 16 | "
" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## Representing Unstructured Data as Vectors\n", 24 | "Visualizing these vectors and measuring the distance between these vectors using various methods such as Manhattan Distance, Euclidean Distance, Cosine Distance & Dot Product" 25 | ], 26 | "id": "565b961f" 27 | }, 28 | { 29 | "cell_type": "markdown", 30 | "metadata": {}, 31 | "source": [ 32 | "Let's take an example of two pets and visualize them in a 3D space. We will try to find the Manhattan Distance, Euclidean Distance, Cosine Distance & Dot Product between these two pets." 33 | ], 34 | "id": "7ac857fe" 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "Hypothetically consider these vectors for to represent dog and cat.\n", 41 | "dog = [5, 30, 2]\n", 42 | "cat = [3, 25, 4]" 43 | ], 44 | "id": "916063d3" 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "### Install the libraries required" 51 | ], 52 | "id": "bbae7661" 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 1, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "!pip install numpy matplotlib --quiet" 61 | ], 62 | "id": "386e29fc" 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": [ 68 | "### Import the libraries" 69 | ], 70 | "id": "7207d8e9" 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 2, 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [ 78 | "import matplotlib.pyplot as plt\n", 79 | "import numpy as np" 80 | ], 81 | "id": "8d9089b4" 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 3, 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "from mpl_toolkits.mplot3d import Axes3D\n", 90 | "\n", 91 | "# Example pets attributes: [weight, height, age]\n", 92 | "# These are hypothetical numbers for illustration purposes\n", 93 | "dog = [5, 30, 2]\n", 94 | "cat = [3, 25, 
4]\n", 95 | "\n", 96 | "fig = plt.figure()\n", 97 | "ax = fig.add_subplot(111, projection='3d')\n", 98 | "\n", 99 | "# Plotting the pets\n", 100 | "ax.scatter(dog[0], dog[1], dog[2], label=\"Dog\", c='blue')\n", 101 | "ax.scatter(cat[0], cat[1], cat[2], label=\"Cat\", c='green')\n", 102 | "\n", 103 | "# Drawing lines from the origin to the points\n", 104 | "ax.quiver(0, 0, 0, dog[0], dog[1], dog[2], color='blue', arrow_length_ratio=0.1)\n", 105 | "ax.quiver(0, 0, 0, cat[0], cat[1], cat[2], color='green', arrow_length_ratio=0.1)\n", 106 | "\n", 107 | "# Labeling the axes\n", 108 | "ax.set_xlabel('Weight (kg)')\n", 109 | "ax.set_ylabel('Height (cm)')\n", 110 | "ax.set_zlabel('Age (years)')\n", 111 | "\n", 112 | "# Setting the limits for better visualization\n", 113 | "ax.set_xlim(0, 10)\n", 114 | "ax.set_ylim(0, 40)\n", 115 | "ax.set_zlim(0, 5)\n", 116 | "\n", 117 | "# Adding legend and title\n", 118 | "ax.legend()\n", 119 | "ax.set_title('3D Representation of Pets')\n", 120 | "\n", 121 | "plt.show()" 122 | ], 123 | "id": "ea181a08" 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "### Manhattan Distance" 130 | ], 131 | "id": "988ef57c" 132 | }, 133 | { 134 | "cell_type": "markdown", 135 | "metadata": {}, 136 | "source": [ 137 | "Manhattan distance is like calculating the total distance you would travel between two points (dog and cat here) if you could only move in straight lines" 138 | ], 139 | "id": "7252eade" 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": 4, 144 | "metadata": {}, 145 | "outputs": [], 146 | "source": [ 147 | "L1 = [abs(dog[i] - cat[i]) for i in range(len(dog))]\n", 148 | "sum(L1)" 149 | ], 150 | "id": "5bddfc07" 151 | }, 152 | { 153 | "cell_type": "markdown", 154 | "metadata": {}, 155 | "source": [ 156 | "### Euclidean Distance" 157 | ], 158 | "id": "5ab22a1e" 159 | }, 160 | { 161 | "cell_type": "markdown", 162 | "metadata": {}, 163 | "source": [ 164 | "Euclidean distance is like 
the straight-line distance between two points, as if you could draw a straight line from one point to another, not limited by any paths or grids, similar to how a bird would fly directly between two locations." 165 | ], 166 | "id": "f5bcdc90" 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": 5, 171 | "metadata": {}, 172 | "outputs": [], 173 | "source": [ 174 | "L2 = [(dog[i] - cat[i])**2 for i in range(len(dog))]\n", 175 | "\n", 176 | "L2 = np.sqrt(np.array(L2).sum())\n", 177 | "L2" 178 | ], 179 | "id": "ec15b420" 180 | }, 181 | { 182 | "cell_type": "markdown", 183 | "metadata": {}, 184 | "source": [ 185 | "### Cosine Distance" 186 | ], 187 | "id": "6591546d" 188 | }, 189 | { 190 | "cell_type": "markdown", 191 | "metadata": {}, 192 | "source": [ 193 | "Cosine distance is a measure of orientation rather than actual distance; it's like comparing the directions in which two arrows are pointing, regardless of how long the arrows are or where they are located." 194 | ], 195 | "id": "3d04c0e3" 196 | }, 197 | { 198 | "cell_type": "code", 199 | "execution_count": 6, 200 | "metadata": {}, 201 | "outputs": [], 202 | "source": [ 203 | "cosine = np.dot(dog, cat) / (np.linalg.norm(dog) * np.linalg.norm(cat))\n", 204 | "cosine" 205 | ], 206 | "id": "5bd73484" 207 | }, 208 | { 209 | "cell_type": "markdown", 210 | "metadata": {}, 211 | "source": [ 212 | "### Dot Product" 213 | ], 214 | "id": "315867e3" 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "metadata": {}, 219 | "source": [ 220 | "The dot product is like measuring how much one vector goes in the same direction as another. It's like comparing two arrows and seeing how much one arrow points in the same direction as the other." 
221 | ], 222 | "id": "68b952bf" 223 | }, 224 | { 225 | "cell_type": "code", 226 | "execution_count": 7, 227 | "metadata": {}, 228 | "outputs": [], 229 | "source": [ 230 | "np.dot(dog,cat)" 231 | ], 232 | "id": "8cdea606" 233 | }, 234 | { 235 | "id": "da390400", 236 | "cell_type": "markdown", 237 | "metadata": {}, 238 | "source": [ 239 | "
\n", 240 | "
" 241 | ] 242 | } 243 | ], 244 | "metadata": { 245 | "jupyterlab": { 246 | "notebooks": { 247 | "version_major": 6, 248 | "version_minor": 4 249 | } 250 | }, 251 | "kernelspec": { 252 | "display_name": "Python 3 (ipykernel)", 253 | "language": "python", 254 | "name": "python3" 255 | }, 256 | "language_info": { 257 | "codemirror_mode": { 258 | "name": "ipython", 259 | "version": 3 260 | }, 261 | "file_extension": ".py", 262 | "mimetype": "text/x-python", 263 | "name": "python", 264 | "nbconvert_exporter": "python", 265 | "pygments_lexer": "ipython3", 266 | "version": "3.11.3" 267 | } 268 | }, 269 | "nbformat": 4, 270 | "nbformat_minor": 5 271 | } 272 | -------------------------------------------------------------------------------- /notebooks/performance-troubleshooting/assets/templates/Result-3.template.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | Pipeline Lag Report 7 | 13 | 17 | 18 | 19 | 20 | 29 | 30 | 44 | 58 | 59 |
39 | 43 | 53 | 57 |
60 | 61 |

62 | Pipeline Lag 63 |

64 | 65 |
66 |

Background

67 |
68 |

69 | Pipeline lag in SingleStore refers to the latency between when data is 70 | available in the source system and when it gets loaded into 71 | SingleStore via the pipeline. 72 |

73 |

Factors Influencing Latency

74 |
    75 |
  1. Network latency
  2. 76 |
  3. Complexity of transformations within the pipeline
  4. 77 |
  5. Batch Interval configuration
  6. 78 |
  7. Overall system load that affects the pipeline's throughput
  8. 79 |
80 | 81 |

82 | Utilizing the pipeline lag report in SingleStore offers several 83 | benefits: 84 |

85 |
    86 |
  1. 87 |

    88 | Monitoring Pipeline Health: The lag report can be used to monitor 89 | the health of data pipelines, ensuring data is loaded in a timely 90 | manner and identifying potential bottlenecks or delays. 91 |

    92 |
  2. 93 |
  3. 94 |

    95 | Performance Insights: It provides insights into pipeline 96 | performance metrics, aiding in optimization efforts. 97 |

    98 |
  4. 99 |
  5. 100 |

    101 | Resource Management: The report helps manage resources by showing 102 | pipeline impact on system resources. 103 |

    104 |
  6. 105 |
  7. 106 |

    107 | Troubleshooting: It serves as a starting point for troubleshooting 108 | performance issues. 109 |

    110 |
  8. 111 |
  9. 112 |

    113 | Real-time Analytics: For applications relying on real-time data, 114 | monitoring pipeline lag ensures data freshness for quality 115 | insights. 116 |

    117 |
  10. 118 |
119 |

120 | Regularly checking the pipeline lag report ensures efficient pipelines 121 | and current data, crucial for real-time analytics and decision-making. 122 |

123 | 124 |

125 | To diagnose and understand pipeline performance, including potential 126 | lag, SingleStore provides several tools and queries PROFILE PIPELINE 127 | command can be used to gather resource consumption metrics, like 128 | starting and ending times, for operations that a batch processes 129 |

130 |
131 |
132 | 133 |
134 |

Actions

135 |
136 |
    137 |
  1. 138 | Identify Lagging Pipelines 139 |      Please use this query to identify the 140 | long running pipelines
    141 | 142 |
    
    143 | SELECT DATABASE_NAME, PIPELINE_NAME, BATCH_ID, BATCH_STATE, BATCH_TIME FROM information_schema.PIPELINES_BATCHES_SUMMARY ORDER BY BATCH_TIME desc;
    144 |
  2. 145 |
  3. 146 | Adjust Pipeline Settings       To address pipeline lag, we can 148 | modify pipeline configurations for consistent lag 149 | 150 |
    
    151 |     ALTER PIPELINE mypipeline SET BATCH_INTERVAL milliseconds;
    152 |     ALTER PIPELINE mypipeline SET MAX_PARTITIONS_PER_BATCH max_partitions_per_batch;
    153 | 
    154 |     BATCH_INTERVAL: the frequency at which the pipeline checks the data source for new data after finishing processing existing data.
    155 |     MAX_PARTITIONS_PER_BATCH: the degree of parallelism and resource usage during the pipeline's execution (a lower value reduces system load).
    156 |             
    157 |             
    158 |
  4. 159 |
  5. 160 | Set New Offsets 161 |       useful for addressing lag by skipping 162 | records or resetting the reading position after pipeline changes 163 |
    
    164 |     ALTER PIPELINE mypipeline SET OFFSETS LATEST;
    165 |             
    166 |
  6. 167 |
  7. 168 | Monitor Continuously You can monitor 169 | the pipelines using below views 170 |
    
    171 |     INFORMATION_SCHEMA.PIPELINES_BATCHES_SUMMARY - Provides diagnostic information such as batch processing times.
    172 |     SHOW PIPELINES - Provides current running status.
    173 |     INFORMATION_SCHEMA.PIPELINES_ERRORS - Provides information about any errors occurred.
    174 |             
    175 |
  8. 176 |
177 |
178 |
179 | 180 |
181 |
rstable
182 |
183 | 184 | 197 | 198 |
199 | 207 |
208 | 209 | 214 | 219 | 223 | 227 | 228 | 240 | 241 | 242 | -------------------------------------------------------------------------------- /common/images/singlestore-logo-grey.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /notebooks/python-udf-template/notebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "id": "1ae3a481", 5 | "cell_type": "markdown", 6 | "metadata": {}, 7 | "source": [ 8 | "
\n", 9 | "
\n", 10 | " \n", 11 | "
\n", 12 | "
\n", 13 | "
SingleStore Notebooks
\n", 14 | "

Run your first Python UDF

\n", 15 | "
\n", 16 | "
" 17 | ] 18 | }, 19 | { 20 | "id": "537627f1", 21 | "cell_type": "markdown", 22 | "metadata": {}, 23 | "source": [ 24 | "
\n", 25 | " \n", 26 | "
\n", 27 | "

Note

\n", 28 | "

This notebook can be run on a Free Starter Workspace. To create a Free Starter Workspace navigate to Start using the left nav. You can also use your existing Standard or Premium workspace with this Notebook.

\n", 29 | "
\n", 30 | "
" 31 | ] 32 | }, 33 | { 34 | "cell_type": "markdown", 35 | "id": "9b6469e0", 36 | "metadata": {}, 37 | "source": [ 38 | "

This feature is currently in Private Preview. Please reach out to support@singlestore.com to confirm if this feature can be enabled in your org.

" 39 | ] 40 | }, 41 | { 42 | "attachments": {}, 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "This Jupyter notebook will help you build your first Python UDF using Notebooks, registering it with your database and calling it as part of SQL query." 47 | ], 48 | "id": "22cf54b0" 49 | }, 50 | { 51 | "attachments": {}, 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "## Create some simple tables\n", 56 | "\n", 57 | "This setup establishes a basic relational structure to store some reviews for restaurants. Ensure you have selected a database." 58 | ], 59 | "id": "891fb62a" 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 1, 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "%%sql\n", 68 | "DROP TABLE IF EXISTS reviews;\n", 69 | "\n", 70 | "CREATE TABLE IF NOT EXISTS\n", 71 | "reviews (\n", 72 | " review_id INT PRIMARY KEY,\n", 73 | " store_name VARCHAR(255) NOT NULL,\n", 74 | " review TEXT NOT NULL\n", 75 | ");" 76 | ], 77 | "id": "d807ea15" 78 | }, 79 | { 80 | "attachments": {}, 81 | "cell_type": "markdown", 82 | "id": "3aace2e9", 83 | "metadata": {}, 84 | "source": [ 85 | "## Insert sample data" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 2, 91 | "id": "0a123cd7", 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "%%sql INSERT into reviews (review_id, store_name, review) values\n", 96 | "(\"1\", \"Single Pizza\", \"The staff were very respectful and made thoughtful suggestions. I will definitely go again. 10/10!\"),\n", 97 | "(\"2\", \"Single Pizza\", \"The food was absolutely amazing and the service was fantastic!\"),\n", 98 | "(\"3\", \"Single Pizza\", \"The experience was terrible. The food was cold and the waiter was rude.\"),\n", 99 | "(\"4\", \"Single Pizza\", \"I loved the ambiance and the desserts were out of this world!\"),\n", 100 | "(\"5\", \"Single Pizza\", \"Not worth the price. 
I expected more based on the reviews\");" 101 | ] 102 | }, 103 | { 104 | "attachments": {}, 105 | "cell_type": "markdown", 106 | "metadata": {}, 107 | "source": [ 108 | "## Define Python UDF functions\n", 109 | "\n", 110 | "Next, we will be Python UDF function using the `@udf` annotation. We will be using the `VADER` model of `nltk` library to perform sentiment analysis on the review text." 111 | ], 112 | "id": "9bc1fbbb" 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": 3, 117 | "id": "1556ad3c", 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "!pip install nltk" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": 4, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "from singlestoredb.functions import udf\n", 131 | "import nltk\n", 132 | "from nltk.sentiment import SentimentIntensityAnalyzer\n", 133 | "\n", 134 | "nltk.download('vader_lexicon')\n", 135 | "sia = SentimentIntensityAnalyzer()\n", 136 | "\n", 137 | "@udf\n", 138 | "def review_sentiment(review: str) -> str:\n", 139 | " print(\"review:\" + review)\n", 140 | " scores = sia.polarity_scores(review)\n", 141 | " sentiment = (\n", 142 | " \"Positive\" if scores['compound'] > 0.05 else\n", 143 | " \"Negative\" if scores['compound'] < -0.05 else\n", 144 | " \"Neutral\"\n", 145 | " )\n", 146 | " print(\"sentiment:\" + sentiment)\n", 147 | " return sentiment" 148 | ], 149 | "id": "94080c32" 150 | }, 151 | { 152 | "attachments": {}, 153 | "cell_type": "markdown", 154 | "metadata": {}, 155 | "source": [ 156 | "## Start the Python UDF server\n", 157 | "\n", 158 | "This will start the server as well as register all the functions annotated with `@udf` as external user defined functions on your selected database." 
159 | ], 160 | "id": "9ef4fb97" 161 | }, 162 | { 163 | "cell_type": "code", 164 | "execution_count": 5, 165 | "metadata": {}, 166 | "outputs": [], 167 | "source": [ 168 | "import singlestoredb.apps as apps\n", 169 | "connection_info = await apps.run_udf_app()" 170 | ], 171 | "id": "b716549f" 172 | }, 173 | { 174 | "attachments": {}, 175 | "cell_type": "markdown", 176 | "id": "b53cd3d1", 177 | "metadata": {}, 178 | "source": [ 179 | "## List all registered UDFs\n", 180 | "\n", 181 | "In interactive notebooks, the udf function will be suffixed with `_test` to differentiate it from the published version" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 6, 187 | "id": "6008982d", 188 | "metadata": {}, 189 | "outputs": [], 190 | "source": [ 191 | "%%sql\n", 192 | "SHOW functions" 193 | ] 194 | }, 195 | { 196 | "attachments": {}, 197 | "cell_type": "markdown", 198 | "id": "58560b03", 199 | "metadata": {}, 200 | "source": [ 201 | "## Call the UDF from SQL\n", 202 | "\n", 203 | "You will now be able to run queries like\n", 204 | "\n", 205 | "```\n", 206 | "SELECT review_id, store_name, review, review_sentiment_test(review) from reviews order by review_id;\n", 207 | "```\n", 208 | "from the SQL editor or any other SQL client.\n", 209 | "\n", 210 | "Try it out by opening another notebook, selecting the current Database and running this query in a new cell." 211 | ] 212 | }, 213 | { 214 | "attachments": {}, 215 | "cell_type": "markdown", 216 | "metadata": {}, 217 | "source": [ 218 | "## Publish Python UDF\n", 219 | "\n", 220 | "After validating the Python UDF interactively, you can publish it and access it like\n", 221 | "\n", 222 | "```\n", 223 | "%%sql\n", 224 | "SELECT review_id, store_name, review, review_sentiment(review) from reviews order by review_id\n", 225 | "```\n", 226 | "\n", 227 | "enriching your data exploration experience seamlessly!" 
228 | ], 229 | "id": "825e6fa2" 230 | }, 231 | { 232 | "id": "50dab547", 233 | "cell_type": "markdown", 234 | "metadata": {}, 235 | "source": [ 236 | "
\n", 237 | "
" 238 | ] 239 | } 240 | ], 241 | "metadata": { 242 | "jupyterlab": { 243 | "notebooks": { 244 | "version_major": 6, 245 | "version_minor": 4 246 | } 247 | }, 248 | "kernelspec": { 249 | "display_name": "Python 3 (ipykernel)", 250 | "language": "python", 251 | "name": "python3" 252 | }, 253 | "language_info": { 254 | "codemirror_mode": { 255 | "name": "ipython", 256 | "version": 3 257 | }, 258 | "file_extension": ".py", 259 | "mimetype": "text/x-python", 260 | "name": "python", 261 | "nbconvert_exporter": "python", 262 | "pygments_lexer": "ipython3", 263 | "version": "3.11.9" 264 | } 265 | }, 266 | "nbformat": 4, 267 | "nbformat_minor": 5 268 | } 269 | --------------------------------------------------------------------------------