├── .github └── workflows │ ├── deploy.yml │ └── test-deploy.yml ├── .gitignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── README.md ├── babel.config.js ├── deps └── stackql-docusaurus-plugin-hubspot-1.1.0.tgz ├── docs ├── assets │ ├── ttdbg-ext-install.png │ ├── ttdbg-login-cloud.png │ └── ttdbg-login-cmd-palette.png ├── examples │ └── index.md ├── explanations │ ├── how-workflows-work.md │ └── system-tables.md ├── faq.md ├── index.md ├── integrations │ ├── adding-dbos-to-next.md │ ├── assets │ │ ├── serverfulnext.png │ │ └── serverlessnext.png │ ├── django.md │ ├── nestjs.md │ └── supabase.md ├── partials │ └── _install_node.mdx ├── production │ ├── dbos-cloud │ │ ├── _category_.json │ │ ├── account-management.md │ │ ├── application-management.md │ │ ├── assets │ │ │ ├── cc-logs.png │ │ │ ├── cc-orgs.png │ │ │ ├── cc-traces.png │ │ │ ├── dash-debug-wf.png │ │ │ ├── execution-seconds.png │ │ │ ├── filters.png │ │ │ ├── log.png │ │ │ ├── time_picker.png │ │ │ ├── timeseries.png │ │ │ ├── ttdbg-code-lens.png │ │ │ ├── ttdbg-debugging.png │ │ │ ├── ttdbg-launch-proxy.png │ │ │ ├── ttdbg-wfid-manual.png │ │ │ └── ttdbg-wfid-quick-pick.png │ │ ├── byod-management.md │ │ ├── cicd.md │ │ ├── cloud-cli.md │ │ ├── database-management.md │ │ ├── deploying-to-cloud.md │ │ ├── monitoring-dashboard.md │ │ ├── otel-integration.md │ │ ├── secrets.md │ │ └── workflow-management.md │ ├── index.md │ └── self-hosting │ │ ├── _category_.json │ │ ├── admin-api.md │ │ ├── conductor.md │ │ ├── hosting-with-docker.md │ │ ├── hosting-with-kubernetes.md │ │ ├── workflow-management.md │ │ └── workflow-recovery.md ├── python │ ├── examples │ │ ├── _category_.json │ │ ├── assets │ │ │ ├── cron-starter.png │ │ │ ├── document_detective.png │ │ │ ├── langgraph-agent-architect.png │ │ │ ├── langgraph-agent-workflow.png │ │ │ └── widget_store_ui.png │ │ ├── chatbot.md │ │ ├── cron-starter.md │ │ ├── customer-service.md │ │ ├── document-detective.md │ │ ├── earthquake-tracker.md │ │ ├── 
hacker-news-bot.md │ │ ├── rag-slackbot.md │ │ ├── scheduled-reminders.md │ │ ├── stock-tracker.md │ │ └── widget-store.md │ ├── integrating-dbos.md │ ├── programming-guide.md │ ├── prompting.md │ ├── reference │ │ ├── _category_.json │ │ ├── cli.md │ │ ├── client.md │ │ ├── configuration.md │ │ ├── contexts.md │ │ ├── dbos-class.md │ │ ├── decorators.md │ │ ├── queues.md │ │ └── workflow_handles.md │ └── tutorials │ │ ├── _category_.json │ │ ├── assets │ │ ├── ttdb-debug-breakpoint.png │ │ ├── ttdb-wfid-picker.png │ │ ├── ttdbg-cloud-replay.png │ │ ├── ttdbg-local-replay.png │ │ └── ttdbg-proxy-terminal.png │ │ ├── authentication-authorization.md │ │ ├── classes.md │ │ ├── debugging.md │ │ ├── kafka-integration.md │ │ ├── logging-and-tracing.md │ │ ├── queue-tutorial.md │ │ ├── scheduled-workflows.md │ │ ├── step-tutorial.md │ │ ├── testing.md │ │ ├── transaction-tutorial.md │ │ ├── workflow-management.md │ │ └── workflow-tutorial.md ├── quickstart.md ├── typescript │ ├── examples │ │ ├── _category_.json │ │ ├── assets │ │ │ ├── alert_center_ui.png │ │ │ ├── dbos-task-scheduler-main.png │ │ │ ├── shop-guide-diagram-source.txt │ │ │ └── shop-guide-diagram.svg │ │ ├── checkout-tutorial.md │ │ ├── kafka-alert-queue.md │ │ └── task-scheduler.md │ ├── integrating-dbos.md │ ├── programming-guide.md │ ├── prompting.md │ ├── reference │ │ ├── _category_.json │ │ ├── client.md │ │ ├── configuration.md │ │ ├── libraries.md │ │ ├── tools │ │ │ ├── _category_.json │ │ │ ├── assets │ │ │ │ └── ttdbg-proxy-terminal.png │ │ │ ├── cli.md │ │ │ └── dbos-compiler.md │ │ └── transactapi │ │ │ ├── _category_.json │ │ │ ├── dbos-class.md │ │ │ ├── eventreceivercontext.md │ │ │ ├── workflow-handles.md │ │ │ └── workflow-queues.md │ └── tutorials │ │ ├── _category_.json │ │ ├── assets │ │ ├── console-debug-picker.png │ │ ├── ttdb-debug-breakpoint.png │ │ ├── ttdb-wfid-picker.png │ │ ├── ttdbg-cloud-replay.png │ │ ├── ttdbg-local-replay.png │ │ ├── ttdbg-time-travel.png │ │ └── 
ttdbg-wfid-picker-with-console.png │ │ ├── authentication-authorization.md │ │ ├── debugging.md │ │ ├── development │ │ ├── _category_.json │ │ ├── static-analysis.md │ │ ├── testing-tutorial.md │ │ └── using-libraries.md │ │ ├── instantiated-objects.md │ │ ├── logging.md │ │ ├── queue-tutorial.md │ │ ├── requestsandevents │ │ ├── _category_.json │ │ ├── custom-event-receiver.md │ │ ├── http-serving-tutorial.md │ │ └── kafka-integration.md │ │ ├── scheduled-workflows.md │ │ ├── step-tutorial.md │ │ ├── stored-proc-tutorial.md │ │ ├── transaction-tutorial.md │ │ └── workflow-tutorial.md └── why-dbos.md ├── docusaurus.config.js ├── package-lock.json ├── package.json ├── sidebars.js ├── src ├── components │ ├── BrowserWindow │ │ ├── index.tsx │ │ └── styles.module.css │ ├── CardComponents.tsx │ ├── LargeTabs.js │ └── styles.module.css ├── css │ └── custom.css └── theme │ └── MDXComponents.js └── static ├── .nojekyll ├── CNAME └── img ├── ai-starter ├── 1-pick-template.png ├── 2-ready-deploy.png └── 4-app-page.png ├── conductor ├── app-page.png ├── conductor-key.png ├── list-apps.png └── register-app.png ├── cron-starter ├── 1-pick-template.png ├── 2-ready-deploy.png └── 4-app-page.png ├── dbos-logo-dark.png ├── dbos-logo.png ├── discord-mark-blue.svg ├── favicon.ico ├── python-logo-only.svg ├── quickstart ├── 1-pick-template.png ├── 3-deploy-github.png ├── 4-deploy-success.png ├── 5-app-page.png ├── node-app-starter.png └── python-app-starter.png ├── secrets └── secrets-page.png ├── social-card.jpg ├── supabase-starter ├── 1-supabase-list.png ├── 2-supabase-starter.png ├── 3-choose-supabase.png └── databases.png ├── typescript-logo.svg ├── why-dbos ├── dbos-pg.png ├── dbos-steps-pg.jpg └── workflow-example.png └── workflow-management ├── queue-list.png ├── workflow-details.png ├── workflow-fork.png ├── workflow-list.png └── workflow-steps.png /.github/workflows/deploy.yml: -------------------------------------------------------------------------------- 1 | name: 
Deploy to GitHub Pages 2 | 3 | on: 4 | # push: ### Temporarily disable auto deployment on push to main. 5 | # branches: 6 | # - main 7 | # Review gh actions docs if you want to further define triggers, paths, etc 8 | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on 9 | workflow_dispatch: 10 | 11 | jobs: 12 | deploy: 13 | name: Deploy to GitHub Pages 14 | runs-on: ubuntu-latest 15 | permissions: 16 | contents: write 17 | steps: 18 | - uses: actions/checkout@v3 19 | - uses: actions/setup-node@v3 20 | with: 21 | node-version: 18 22 | cache: npm 23 | 24 | - name: Install dependencies 25 | run: npm ci 26 | - name: Build website 27 | run: npm run build 28 | 29 | # Popular action to deploy to GitHub Pages: 30 | # Docs: https://github.com/peaceiris/actions-gh-pages#%EF%B8%8F-docusaurus 31 | - name: Deploy to GitHub Pages 32 | uses: peaceiris/actions-gh-pages@v3 33 | with: 34 | github_token: ${{ secrets.GITHUB_TOKEN }} 35 | # Build output to publish to the `gh-pages` branch: 36 | publish_dir: ./build 37 | # The following lines assign commit authorship to the official 38 | # GH-Actions bot for deploys to `gh-pages` branch: 39 | # https://github.com/actions/checkout/issues/13#issuecomment-724415212 40 | # The GH actions bot is used by default if you didn't specify the two fields. 41 | # You can swap them out with your own user credentials. 42 | user_name: github-actions[bot] 43 | user_email: 41898282+github-actions[bot]@users.noreply.github.com -------------------------------------------------------------------------------- /.github/workflows/test-deploy.yml: -------------------------------------------------------------------------------- 1 | name: Test deployment 2 | 3 | on: 4 | push: ### Temporarily enable test on main. 
5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | # Review gh actions docs if you want to further define triggers, paths, etc 11 | # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#on 12 | workflow_dispatch: 13 | 14 | jobs: 15 | test-deploy: 16 | name: Test deployment 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v3 20 | - uses: actions/setup-node@v3 21 | with: 22 | node-version: 18 23 | cache: npm 24 | 25 | - name: Install dependencies 26 | run: npm ci 27 | - name: Test build website 28 | run: npm run build -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Dependencies 2 | /node_modules 3 | 4 | # Production 5 | /build 6 | 7 | # Generated files 8 | .docusaurus 9 | .cache-loader 10 | 11 | # Misc 12 | .DS_Store 13 | .env.local 14 | .env.development.local 15 | .env.test.local 16 | .env.production.local 17 | 18 | npm-debug.log* 19 | yarn-debug.log* 20 | yarn-error.log* 21 | 22 | # Editor temp 23 | *.swp 24 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | As contributors and maintainers of the DBOS Documentation project, we pledge to respect everyone who contributes by posting issues, updating documentation, submitting pull requests, providing feedback in comments, and any other activities. 3 | 4 | ## Our Standards 5 | Examples of behavior that contributes to creating a positive environment include: 6 | 7 | * Using welcoming and inclusive language. 8 | * Being respectful of differing viewpoints and experiences. 9 | * Gracefully accepting constructive criticism. 10 | * Focusing on what is best for the community. 11 | * Showing empathy towards other community members. 
12 | 13 | Examples of unacceptable behavior by participants include: 14 | 15 | * The use of sexualized language or imagery and unwelcome sexual attention or advances. 16 | * Trolling, insulting/derogatory comments, and personal or political attacks. 17 | * Public or private harassment. 18 | * Publishing others' private information, such as physical or electronic addresses, without explicit permission. 19 | * Other conduct which could reasonably be considered inappropriate in a professional setting 20 | Our Responsibilities. 21 | * Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 22 | 23 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned with this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 24 | 25 | ## Scope 26 | This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. 27 | 28 | ## Enforcement 29 | Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at 30 | contact@dbos.dev. All complaints will be reviewed and investigated promptly and fairly. 31 | 32 | All project team members are obligated to respect the privacy and security of the reporter of any incident. 
33 | 34 | ## Enforcement Guidelines 35 | Project maintainers will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: 36 | 37 | * 1st offense: Verbal warning 38 | * 2nd offense: Written warning 39 | * 3rd offense: Temporary ban 40 | * 4th offense: Permanent ban 41 | 42 | ## Attribution 43 | This Code of Conduct is adapted from the Contributor Covenant, version 2.0, available at https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to DBOS-Docs 2 | 3 | Thank you for considering contributing to the DBOS Documentation. We welcome contributions from everyone, including bug fixes, feature enhancements, documentation improvements, or any other form of contribution. 4 | 5 | ## How to Contribute 6 | 7 | To get started with DBOS-Docs, please read the [README](README.md). 8 | 9 | You can contribute in many ways. Some simple ways are: 10 | * Open issues to report any bugs, questions, concern with DBOS Transact, samples or documentation. 11 | * Respond to issues with advice or suggestions. 12 | * Participate in discussions in our [Discord](https://discord.gg/fMwQjeW5zg) channel. 13 | * Contribute fixes and improvement to code, samples or documentation. 14 | 15 | ### To contribute code, please follow these steps: 16 | 17 | 1. Fork this github repository to your own account. 18 | 19 | 2. Clone the forked repository to your local machine. 20 | 21 | 3. Create a branch. 22 | 23 | 4. Make the necessary change to code, samples or documentation. 24 | 25 | 5. Commit the changes to your forked repository. 26 | 27 | 6. Submit a pull request to this repository. 28 | In the PR description please include: 29 | * Description of the fix/feature. 30 | * Brief description of implementation. 
31 | 32 | By submitting a pull request, you represent that you have the right to license your contribution to DBOS and the community, and agree by submitting the patch that your contributions are licensed under the MIT license. 33 | 34 | ## Requesting features 35 | 36 | If you have a feature request or an idea for an enhancement, feel free to open an issue on GitHub. Describe the feature or enhancement you'd like to see and why it would be valuable. Discuss it with the community on the [Discord](https://discord.gg/fMwQjeW5zg) channel. 37 | 38 | ## Discuss with the community 39 | 40 | If you are stuck, need help, or wondering if a certain contribution will be welcome, please ask! You can reach out to us on [Discord](https://discord.gg/fMwQjeW5zg) or Github discussions. 41 | 42 | ## Code of conduct 43 | 44 | It is important to us that contributing to DBOS will be a pleasant experience, if necessary, please refer to our [code of conduct](CODE_OF_CONDUCT.md) for participation guidelines. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DBOS Documentation 2 | 3 | ### Local Development 4 | 5 | This site is built using [Docusaurus](https://docusaurus.io/). 6 | All documentation is written in Markdown in the `/docs` folder. 7 | Site-wide configuration (e.g., header, footer, favicon) is controlled from `docusaurus.config.js`. 8 | For detailed information, see the [Docusaurus documentation](https://docusaurus.io/docs/docs-introduction). 9 | 10 | ### Local Deployment 11 | 12 | ``` 13 | npm install 14 | npm run start 15 | ``` 16 | 17 | ### Publish on Github 18 | 19 | Publishing is done automatically by a commit hook. 
You should never need to run this yourself: 20 | 21 | ``` 22 | GIT_USER= npx run deploy 23 | ``` 24 | -------------------------------------------------------------------------------- /babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [require.resolve('@docusaurus/core/lib/babel/preset')], 3 | }; 4 | -------------------------------------------------------------------------------- /deps/stackql-docusaurus-plugin-hubspot-1.1.0.tgz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/deps/stackql-docusaurus-plugin-hubspot-1.1.0.tgz -------------------------------------------------------------------------------- /docs/assets/ttdbg-ext-install.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/assets/ttdbg-ext-install.png -------------------------------------------------------------------------------- /docs/assets/ttdbg-login-cloud.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/assets/ttdbg-login-cloud.png -------------------------------------------------------------------------------- /docs/assets/ttdbg-login-cmd-palette.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/assets/ttdbg-login-cmd-palette.png -------------------------------------------------------------------------------- /docs/examples/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: DBOS Examples 3 | description: Example applications built with 
DBOS 4 | pagination_next: null 5 | --- 6 | 7 | # Featured Examples 8 | 9 | import { FaHackerNews, FaSlack } from "react-icons/fa6"; 10 | import { MdOutlineShoppingCart } from "react-icons/md"; 11 | import { SiApachekafka, SiOpenai } from "react-icons/si"; 12 | import { IoEarth } from "react-icons/io5"; 13 | import { RiCalendarScheduleLine } from "react-icons/ri"; 14 | import { IoIosChatboxes } from "react-icons/io"; 15 | import { PiFileMagnifyingGlassBold } from "react-icons/pi"; 16 | import { RiCustomerService2Line } from "react-icons/ri"; 17 | import { TbClock2 } from "react-icons/tb"; 18 | import { VscGraphLine } from "react-icons/vsc"; 19 | 20 |
21 | } 26 | language="python" 27 | /> 28 | } 33 | language="python" 34 | /> 35 | } 40 | language="python" 41 | /> 42 | } 47 | language="typescript" 48 | /> 49 | } 54 | language="python" 55 | /> 56 | } 61 | language="typescript" 62 | /> 63 | } 68 | language="python" 69 | /> 70 | } 75 | language="python" 76 | /> 77 | } 82 | language="python" 83 | /> 84 | } 89 | language="python" 90 | /> 91 | } 96 | language="python" 97 | /> 98 | } 103 | language="python" 104 | /> 105 | } 110 | language="typescript" 111 | /> 112 |
113 | -------------------------------------------------------------------------------- /docs/explanations/system-tables.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 10 3 | title: DBOS System Tables 4 | description: DBOS system tables reference 5 | --- 6 | 7 | ## System Tables 8 | DBOS Transact records application execution history in several system tables. 9 | Most of these tables are in the system database, whose name is your application name suffixed with `_dbos_sys`. 10 | For example, if your application is named `dbos_app_starter`, your system database is named `dbos_app_starter_dbos_sys`. 11 | One exception is the `dbos.transaction_outputs` table which is stored in your application database. 12 | 13 | ### dbos.workflow_status 14 | 15 | This table stores workflow execution information. 16 | Each row represents a different workflow execution. 17 | 18 | **Columns:** 19 | - `workflow_uuid`: The unique identifier of the workflow execution. 20 | - `status`: The status of the workflow execution. One of `PENDING`, `SUCCESS`, `ERROR`, `RETRIES_EXCEEDED`, or `CANCELLED`. 21 | - `name`: The name (in Python, fully qualified name) of the workflow function. 22 | - `authenticated_user`: The user who ran the workflow. Empty string if not set. 23 | - `assumed_role`: The role used to run this workflow. Empty string if authorization is not required. 24 | - `authenticated_roles`: All roles the authenticated user has, if any. 25 | - `request`: The serialized HTTP Request that triggered this workflow, if any. 26 | - `inputs`: The serialized inputs of the workflow execution. 27 | - `output`: The serialized workflow output, if any. 28 | - `error`: The serialized error thrown by the workflow, if any. 29 | - `created_at`: The epoch timestamp of when this workflow was created (enqueued or started). 30 | - `updated_at`: The latest epoch timestamp when this workflow status was updated. 
31 | `application_version`: The application version of this workflow code. 32 | - `class_name`: The class name of the workflow function. 33 | - `config_name`: The name of the configured instance of this workflow, if any. 34 | - `recovery_attempts`: The number of attempts (so far) to recover this workflow. 35 | - `queue_name`: If this workflow is or was enqueued, the name of the queue. 36 | - `executor_id`: The ID of the executor that ran this workflow. 37 | - `workflow_timeout_ms`: The timeout of the workflow, if specified. 38 | - `workflow_deadline_epoch_ms`: The deadline at which the workflow times out, if the workflow has a timeout. Derived when the workflow starts by adding the timeout to the workflow start time (which may be different than the creation time for enqueued workflows). 39 | - `started_at_epoch_ms`: If this workflow was enqueued, the time at which it was dequeued and began execution. 40 | - `deduplication_id`: The deduplication key for this workflow, if any. 41 | - `priority`: The priority of this workflow on its queue, if enqueued. Defaults to 0 if not specified. Lower priorities execute first. 42 | 43 | ### dbos.operation_outputs 44 | This table stores the outputs of workflow steps. 45 | Each row represents a different workflow step execution. 46 | Executions of DBOS methods like `DBOS.sleep` and `DBOS.send` are also recorded here as steps, as is enqueueing or starting a child workflow. 47 | 48 | **Columns:** 49 | - `workflow_uuid`: The unique identifier of the workflow execution this function belongs to. 50 | - `function_id`: The monotonically increasing ID of the step (starts from 0) within the workflow, based on the order in which steps execute. 51 | - `function_name`: The name of the step. 52 | - `output`: The serialized transaction output, if any. 53 | - `error`: The serialized error thrown by the transaction, if any. 54 | - `child_workflow_id`: If the step starts a new child workflow, its ID. 
55 | 56 | ### dbos.notifications 57 | This table stores workflow messages/notifications. 58 | Each entry represents a different message. 59 | 60 | **Columns:** 61 | - `destination_uuid`: The ID of the workflow to which the message is sent. 62 | - `topic`: The topic to which the message is sent. 63 | - `message`: The serialized contents of the message. 64 | - `created_at_epoch_ms`: The epoch timestamp when this message was created. 65 | - `message_uuid`: The unique ID of the message. 66 | 67 | ### dbos.workflow_events 68 | This table stores workflow events. 69 | Each entry represents a different event. 70 | 71 | **Columns:** 72 | - `workflow_uuid`: The ID of the workflow that published this event. 73 | - `key`: The serialized key of the event. 74 | - `value`: The serialized value of the event. 75 | 76 | ### dbos.transaction_outputs 77 | This table stores the outputs of transaction functions. 78 | Each row represents a different transaction function execution. 79 | 80 | **Columns:** 81 | - `workflow_uuid`: The unique identifier of the workflow execution this function belongs to. 82 | - `function_id`: The monotonically increasing ID of the function (starts from 0) within the workflow, based on the start order. 83 | - `output`: The serialized transaction output, if any. 84 | - `error`: The serialized error thrown by the transaction, if any. 85 | - `txn_id`: The transaction ID of this function, if any. This is empty for read-only transactions. 86 | - `created_at`: The timestamp of when this function started. 87 | - `txn_snapshot`: The [Postgres snapshot](https://www.postgresql.org/docs/current/functions-info.html#FUNCTIONS-INFO-SNAPSHOT) of this transaction. 
-------------------------------------------------------------------------------- /docs/integrations/assets/serverfulnext.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/integrations/assets/serverfulnext.png -------------------------------------------------------------------------------- /docs/integrations/assets/serverlessnext.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/integrations/assets/serverlessnext.png -------------------------------------------------------------------------------- /docs/integrations/django.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 39 3 | title: Django 4 | --- 5 | 6 | # Make Your Django App Reliable with DBOS 7 | 8 | This guide shows you how to add the open source [DBOS Transact](https://github.com/dbos-inc/dbos-transact-py) library to your existing [Django](https://www.djangoproject.com/) application to **durably execute** it and make it resilient to any failure. 9 | 10 | In summary you'll need to: 11 | - Start DBOS with your [AppConfig's ready method](https://docs.djangoproject.com/en/5.2/ref/applications/#django.apps.AppConfig.ready) 12 | - Annotate your service methods with DBOS decorators to make them durable 13 | - Start Django with the `--noreload` flag. 14 | 15 | ## Installation and Requirements 16 | :::info 17 | The guide is based on the Django [quickstart](https://docs.djangoproject.com/en/5.2/intro/tutorial01/), will show you how to make your application reliable with [DBOS Transact](https://github.com/dbos-inc/dbos-transact-py). 18 | ::: 19 | 20 |
21 | Setting up the Django quickstart 22 | 23 | This application was created with: 24 | 25 | ```shell 26 | python3 -m venv .venv 27 | source .venv/bin/activate 28 | pip install django 29 | django-admin startproject djangodbos . 30 | python manage.py startapp polls 31 | ``` 32 | 33 | Then, configure `djangodbos/settings.py` to [use Postgres](https://docs.djangoproject.com/en/5.2/ref/settings/#databases) and run `python manage.py migrate`. 34 |
35 | 36 | Install DBOS Python with: 37 | ```shell 38 | pip install dbos 39 | ``` 40 | 41 | ## Starting DBOS 42 | 43 | In your Django application `AppConfig`, start DBOS inside the `ready` method. You can [configure the DBOS instance](https://docs.dbos.dev/python/reference/configuration) before [launching DBOS](https://docs.dbos.dev/python/reference/dbos-class#launch). 44 | 45 | 46 | ```python 47 | from django.apps import AppConfig 48 | from dbos import DBOS 49 | import os 50 | 51 | class PollsConfig(AppConfig): 52 | default_auto_field = 'django.db.models.BigAutoField' 53 | name = 'polls' 54 | 55 | def ready(self): 56 | dbos_config = { 57 | "name": self.name, 58 | "database_url": os.environ.get("DBOS_DATABASE_URL"), 59 | } 60 | dbos = DBOS(config=dbos_config) 61 | dbos.launch() 62 | return super().ready() 63 | ``` 64 | 65 | Because launching DBOS triggers workflow recovery, it is advised you call `python manage.py runserver` with the `--noreload` flag. 66 | 67 | ## Making Your Views Reliable 68 | 69 | You can make a Django view durable by annotating your functions with [DBOS decorators](https://docs.dbos.dev/python/reference/decorators). 70 | 71 | In this example, we'll augment an existing endpoint `callWorkflow` to invoke a [workflow](../python/tutorials/workflow-tutorial) of two steps. 
72 | 73 | ```python 74 | def callWorkflow(request, a, b): 75 | return JsonResponse(workflow(a, b)) 76 | 77 | # Annotate the workflow() function to make it a durable workflow 78 | @DBOS.workflow() 79 | def workflow(a, b): 80 | res1 = step1(a) 81 | res2 = step2(b) 82 | result = res1 + res2 83 | return {"result": result} 84 | 85 | # Make step1() a durable step 86 | @DBOS.step() 87 | def step1(var): 88 | return var 89 | 90 | # Make step2 a durable transaction (special step with ACID properties) 91 | @DBOS.transaction() 92 | def step2(var): 93 | rows = DBOS.sql_session.execute(sa.text("SELECT 1")).fetchall() 94 | return var + str(rows[0][0]) 95 | ``` 96 | 97 | Update `polls/urls.py` and run your app with `python manage.py runserver --noreload` to access the view at `http://localhost:8000/polls/callWorkflow/a/b`. 98 | -------------------------------------------------------------------------------- /docs/integrations/nestjs.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 2 3 | title: Nest.js 4 | --- 5 | 6 | This guide shows you how to add the open source [DBOS Transact](https://github.com/dbos-inc/dbos-transact-ts) library to your existing [Nest.js](https://nestjs.com/) application to **durably execute** it and make it resilient to any failure. 7 | 8 | ## Installation and Requirements 9 | 10 | Install DBOS TypeScript with: 11 | 12 | ```shell 13 | npm install @dbos-inc/dbos-sdk 14 | ``` 15 | 16 | ## Bootstrapping DBOS 17 | 18 | :::info 19 | This example was bootstrapped with `nest new nest-starter` and configured to use [NPM](https://www.npmjs.com/). 
20 | ::: 21 | 22 | Modify your bootstrap function to import and launch DBOS: 23 | 24 | ```typescript 25 | // main.ts 26 | import { NestFactory } from '@nestjs/core'; 27 | import { AppModule } from './app.module'; 28 | // highlight-next-line 29 | import { DBOS } from "@dbos-inc/dbos-sdk"; 30 | 31 | async function bootstrap() { 32 | const app = await NestFactory.create(AppModule); 33 | // highlight-next-line 34 | DBOS.setConfig({ 35 | // highlight-next-line 36 | "name": "my-app", 37 | // highlight-next-line 38 | "databaseUrl": process.env.DBOS_DATABASE_URL 39 | // highlight-next-line 40 | }); 41 | // highlight-next-line 42 | await DBOS.launch(); 43 | await app.listen(process.env.PORT ?? 3000); 44 | } 45 | bootstrap(); 46 | ``` 47 | 48 | ## Register Services With DBOS 49 | To integrate a Nest.js service with DBOS, your service class must extend the DBOS [ConfiguredInstance](https://docs.dbos.dev/typescript/reference/transactapi/dbos-class#decorating-instance-methods) class. By extending `ConfiguredInstance`, you add your class instance methods to DBOS Transact's internal registry. During [workflow recovery](https://docs.dbos.dev/typescript/tutorials/workflow-tutorial#workflow-versioning-and-recovery), this registry enables DBOS to recover workflows using the right class instance. 50 | 51 | Here is an example of a Nest.js service implementing a simple two-step workflow: 52 | 53 | ```typescript 54 | // app.service.ts 55 | import { Injectable } from '@nestjs/common'; 56 | import { PrismaService } from 'nestjs-prisma'; 57 | // highlight-next-line 58 | import { ConfiguredInstance, DBOS } from '@dbos-inc/dbos-sdk'; 59 | 60 | @Injectable() 61 | // highlight-next-line 62 | export class AppService extends ConfiguredInstance { 63 | constructor( 64 | name: string, // You must provide a name to uniquely identify this class instance in DBOS's internal registry. 
65 | private readonly prisma: PrismaService, // An example service dependency 66 | ) { 67 | super(name); 68 | } 69 | 70 | // Optionally perform some asynchronous setup work 71 | override async initialize(): Promise<void> {} 72 | 73 | // highlight-next-line 74 | @DBOS.workflow() 75 | async businessLogic() { 76 | await this.step1(); 77 | await this.step2(); 78 | } 79 | 80 | // highlight-next-line 81 | @DBOS.step() 82 | async step1() { 83 | ... 84 | } 85 | 86 | // highlight-next-line 87 | @DBOS.step() 88 | async step2() { 89 | ... 90 | }; 91 | } 92 | ``` 93 | 94 | ## Add Nest.js Providers 95 | We also need to write the code that Nest will use to instantiate this service during dependency injection. We'll do this with a [custom Factory Provider](https://docs.nestjs.com/fundamentals/custom-providers#factory-providers-usefactory). Here is an example: 96 | 97 | ```typescript 98 | // app.modules.ts 99 | import { Module } from '@nestjs/common'; 100 | import { Provider } from '@nestjs/common/interfaces'; 101 | import { AppController } from './app.controller'; 102 | import { AppService } from './app.service'; 103 | import { PrismaService, PrismaModule } from 'nestjs-prisma'; 104 | import { DBOS } from '@dbos-inc/dbos-sdk'; 105 | 106 | export const dbosProvider: Provider = { 107 | provide: AppService, 108 | useFactory: (prisma: PrismaService) => { 109 | return new AppService("dbosService", prisma); 110 | }, 111 | inject: [PrismaService], 112 | }; 113 | 114 | @Module({ 115 | imports: [PrismaModule.forRoot()], 116 | controllers: [AppController], 117 | providers: [dbosProvider], 118 | }) 119 | export class AppModule {} 120 | ``` 121 | 122 | If you need multiple instances of your DBOS class, you must give them distinct names (`dbosService` in this case). You can create a dedicated provider for each or use a single provider for multiple classes, at your convenience. 
123 | -------------------------------------------------------------------------------- /docs/partials/_install_node.mdx: -------------------------------------------------------------------------------- 1 | 2 | 3 | Run the following commands in your terminal: 4 | 5 | ```bash 6 | curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.40.1/install.sh | bash 7 | 8 | export NVM_DIR="$HOME/.nvm" 9 | [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" # This loads nvm 10 | 11 | nvm install 22 12 | nvm use 22 13 | ``` 14 | 15 | 16 | 17 | Download Node.js 20 or later from the [official Node.js download page](https://nodejs.org/en/download) and install it. 18 | After installing Node.js, create the following folder: `C:\Users\%user%\AppData\Roaming\npm` 19 | (`%user%` is the Windows user on which you are logged in). 20 | 21 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "DBOS Cloud", 3 | "position": 30 4 | } 5 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/account-management.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 30 3 | title: Account Management 4 | description: Learn how to manage DBOS Cloud users 5 | toc_max_heading_level: 3 6 | --- 7 | 8 | In this guide, you'll learn how to manage DBOS Cloud accounts. 9 | 10 | ### New User Registration 11 | 12 | You can sign up for an account on the [DBOS Cloud console](https://console.dbos.dev/login-redirect). 13 | Additionally, all `dbos-cloud` commands prompt you to register a new account if you don't already have one. 14 | 15 | ### Authenticating Programatically 16 | 17 | Sometimes, such as in a CI/CD pipeline, it is useful to authenticate programatically without providing credentials through a browser-based login portal. 
18 | DBOS Cloud provides this capability with refresh tokens. 19 | To obtain a refresh token, run: 20 | 21 | ``` 22 | dbos-cloud login --get-refresh-token 23 | ``` 24 | 25 | This command has you authenticate through the browser, but obtains a refresh token and stores it in `.dbos/credentials`. 26 | 27 | Once you have your token, you can use it to authenticate programmatically without going through the browser with the following command: 28 | 29 | ``` 30 | dbos-cloud login --with-refresh-token 31 | ``` 32 | 33 | Refresh tokens automatically expire after a year or after a month of inactivity. 34 | You can manually revoke them at any time: 35 | 36 | ``` 37 | dbos-cloud revoke 38 | ``` 39 | 40 | :::warning 41 | Until they expire or are revoked, refresh tokens can be used to log in to your account. 42 | Treat them as secrets and keep them safe! 43 | ::: 44 | 45 | 46 | ### Organization Management 47 | 48 | :::info 49 | This feature is currently only available to [DBOS Pro or Enterprise](https://www.dbos.dev/pricing) subscribers. 50 | ::: 51 | 52 | Organizations allow multiple users to collaboratively manage applications. 53 | When a user creates an account, they are automatically added to an organization containing only them, where the organization name is the same as their username. 54 | 55 | You can manage your organization from the [cloud console organizations page](https://console.dbos.dev/settings/organization): 56 | 57 | ![Organizations](./assets/cc-orgs.png) 58 | 59 | #### Organization Admins 60 | 61 | The original creator of an organization is the organization admin. 62 | Only the organization admin can invite new users, delete existing users, or rename the organization. 63 | All users have full access to organization resources, including databases and applications. 64 | 65 | #### Inviting New Users 66 | 67 | To invite a new user to your organization, click the "Generate Invite Link" button. 68 | This generates a **single-use** URL for joining your organization. 
69 | 70 | When a user signs in to the cloud console using that URL, they are prompted to join your organization. 71 | If they do not have an account, they are prompted to create one. 72 | If they already have an account, they must delete all resources (applications and databases) before joining your organization. 73 | 74 | #### Renaming Your Organization 75 | 76 | You can rename your organization by clicking the icon next to your organization name. 77 | Note that applications belonging to organizations are hosted at the URL `https://-.cloud.dbos.dev/`. 78 | **Therefore, renaming your organization changes your application URLs**. 79 | 80 | #### Removing Users 81 | 82 | The organization admin can remove any other user from their organization. 83 | This immediately terminates their access to all organization resources. 84 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/cc-logs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/cc-logs.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/cc-orgs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/cc-orgs.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/cc-traces.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/cc-traces.png -------------------------------------------------------------------------------- 
/docs/production/dbos-cloud/assets/dash-debug-wf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/dash-debug-wf.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/execution-seconds.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/execution-seconds.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/filters.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/filters.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/log.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/log.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/time_picker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/time_picker.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/timeseries.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/timeseries.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/ttdbg-code-lens.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/ttdbg-code-lens.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/ttdbg-debugging.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/ttdbg-debugging.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/ttdbg-launch-proxy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/ttdbg-launch-proxy.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/ttdbg-wfid-manual.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/ttdbg-wfid-manual.png -------------------------------------------------------------------------------- /docs/production/dbos-cloud/assets/ttdbg-wfid-quick-pick.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/production/dbos-cloud/assets/ttdbg-wfid-quick-pick.png 
-------------------------------------------------------------------------------- /docs/production/dbos-cloud/byod-management.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 50 3 | title: Bringing Your Own Database 4 | --- 5 | 6 | In this guide, you'll learn how to bring your own Postgres database instance to DBOS Cloud and deploy your applications to it. 7 | 8 | ### Linking Your Database to DBOS Cloud 9 | 10 | To bring your own Postgres database instance to DBOS Cloud, you must first create a role DBOS Cloud can use to deploy and manage your apps. 11 | This role must be named `dbosadmin` and must have the `LOGIN` and `CREATEDB` privileges: 12 | 13 | ```sql 14 | CREATE ROLE dbosadmin WITH LOGIN CREATEDB PASSWORD ; 15 | ``` 16 | 17 | Next, link your database instance to DBOS Cloud, entering the password for the `dbosadmin` role when prompted. 18 | You must choose a database instance name that is 3 to 16 characters long and contains only lowercase letters, numbers and underscores. 19 | 20 | ```shell 21 | dbos-cloud db link -H -p 22 | ``` 23 | 24 | You can now register and deploy applications with this database instance as normal! Check out our [applications management](./application-management.md) guide for details. 25 | 26 | :::tip 27 | DBOS Cloud is currently hosted in AWS us-east-1. 28 | For maximum performance, we recommend linking a database instance hosted there. 29 | ::: 30 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/cicd.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 90 3 | title: CI/CD Best Practices 4 | --- 5 | 6 | ## Staging and Production Environments 7 | 8 | To make it easy to test changes to your application without affecting your production users, we recommend using separate staging and production environments. 
9 | You can do this by deploying your application with different names for staging and production. 10 | For example, when deploying `my-app` to staging, deploy using: 11 | 12 | ```shell 13 | dbos-cloud app deploy my-app-staging 14 | ``` 15 | 16 | When deploying to production, use: 17 | 18 | ```shell 19 | dbos-cloud app deploy my-app-prod 20 | ``` 21 | 22 | `my-app-staging` and `my-app-prod` are completely separate and isolated DBOS applications. 23 | There's nothing special about the `-staging` and `-prod` suffixes—you can use any names you like. 24 | 25 | :::info 26 | If you manually specify the application database name by setting `app_db_name` in `dbos-config.yaml`, you must ensure each environment uses a different value of `app_db_name`. 27 | ::: 28 | 29 | ## Authentication 30 | You should use [refresh tokens](account-management#authenticating-programatically) to programmatically authenticate your CI/CD user with DBOS Cloud. 31 | 32 | :::info 33 | Upgrading to a DBOS Cloud paid plan will unlock [multi-user organizations](account-management#organization-management) which you can use to set up dedicated users for CI/CD. 34 | ::: 35 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/database-management.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 40 3 | title: Database Management 4 | description: Learn how to manage DBOS Cloud database instances 5 | --- 6 | 7 | ### Provisioning Database Instances 8 | 9 | Before you can deploy an application to DBOS Cloud, you must provision a Postgres database instance (server) for it. 10 | You must choose a database instance name, username and password. 11 | 12 | :::info 13 | * Both the database instance name and username must be 3 to 16 characters long and contain only lowercase letters, numbers and underscores. 14 | * The username must start with a letter. 
15 | * The usernames `dbosadmin`, `dbos`, `postgres` and `admin` are reserved and cannot be used. 16 | * The database password must contain between 8 and 128 characters, and cannot contain the characters `/`, `"`, `@`, `'`, or whitespaces. 17 | ::: 18 | 19 | Run this command and choose your database password when prompted: 20 | 21 | ```shell 22 | dbos-cloud db provision -U 23 | ``` 24 | 25 | :::info 26 | A Postgres database instance (server) can host many independent databases used by different applications. 27 | Each application is deployed to an isolated database by default; you can configure this through the `app_db_name` field in `dbos-config.yaml`. 28 | ::: 29 | 30 | :::info 31 | If you forget your database password, you can always [reset it](./cloud-cli.md#dbos-cloud-db-reset-password). 32 | ::: 33 | 34 | To see a list of all provisioned instances and their statuses, run: 35 | 36 | ```shell 37 | dbos-cloud db list 38 | ``` 39 | 40 | To retrieve the status of a particular instance, run: 41 | 42 | ```shell 43 | dbos-cloud db status 44 | ``` 45 | 46 | ### Database Schema Management 47 | 48 | Every time you deploy an application to DBOS Cloud, it runs all migrations defined in your `dbos-config.yaml`. 49 | 50 | Sometimes, it may be necessary to manually perform schema changes on a cloud database, for example to recover from a schema migration failure. 51 | To make this easier, you can retrieve your cloud database connection URL by running: 52 | 53 | ```shell 54 | dbos-cloud db url 55 | ``` 56 | 57 | You can then use it to run locally any migration command (for example, a down-migration command in your schema migration tool) and it will execute on your cloud database. 58 | 59 | :::warning 60 | While it is occasionally necessary, be careful when manually changing the schema on a production database. 
61 | ::: 62 | 63 | :::warning 64 | Be careful making breaking schema changes such as deleting or renaming a column—they may break active workflows running on a previous application version. 65 | ::: 66 | 67 | ### Destroying Database Instances 68 | 69 | To destroy a database instance, run: 70 | 71 | ```shell 72 | dbos-cloud db destroy 73 | ``` 74 | 75 | :::warning 76 | Take care—this will irreversibly delete all data in the database instance. 77 | ::: 78 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/deploying-to-cloud.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 10 3 | title: Deploying to DBOS Cloud 4 | hide_table_of_contents: true 5 | --- 6 | import InstallNode from '/docs/partials/_install_node.mdx'; 7 | 8 | 9 | Any application built with DBOS can be deployed to DBOS Cloud. 10 | DBOS Cloud is a serverless platform for durably executed applications. 11 | It provides: 12 | 13 | - [**Application hosting and autoscaling**](./application-management.md): Managed hosting of your application in the cloud, automatically scaling to millions of users. Applications are charged only for the CPU time they actually consume. 14 | - [**Managed workflow recovery**](./application-management.md): If a cloud executor is interrupted, crashed, or restarted, each of its workflows is automatically recovered by another executor. 15 | - [**Workflow and queue management**](./workflow-management.md): Dashboards of all active and past workflows and all queued tasks, including their status, inputs, outputs, and steps. Cancel, resume, or restart any workflow execution and manage the tasks in your distributed queues. 16 | 17 | ## Deploying Your App to DBOS Cloud 18 | 19 | 20 | 21 | 22 | #### 1. Install the DBOS Cloud CLI 23 |
24 |
25 | 26 | The Cloud CLI requires Node.js 20 or later. 27 |
28 | 29 |
30 | 31 |
32 | Instructions to install Node.js 33 | 34 | 35 | 36 |
37 |
38 | 39 |
40 | Run this command to install it. 41 |
42 | 43 |
44 | ```shell 45 | npm i -g @dbos-inc/dbos-cloud@latest 46 | ``` 47 |
48 |
49 | 50 | #### 2. Create a requirements.txt File 51 |
52 |
53 | Create a `requirements.txt` file listing your application's dependencies. 54 |
55 | 56 |
57 | 58 | ```shell 59 | pip freeze > requirements.txt 60 | ``` 61 | 62 |
63 |
64 | 65 | #### 3. Define a Start Command 66 |
67 |
68 | Set the `start` command in the `runtimeConfig` section of your [`dbos-config.yaml`](../../python/reference/configuration.md) to your application's launch command. 69 | 70 | If your application includes an HTTP server, configure it to listen on port 8000. 71 | 72 | To test that it works, try launching your application with `dbos start`. 73 |
74 | 75 |
76 | 77 | ```yaml 78 | runtimeConfig: 79 | start: 80 | - "fastapi run" 81 | ``` 82 | 83 |
84 |
85 | 86 | #### 4. Deploy to DBOS Cloud 87 |
88 |
89 | Run this single command to deploy your application to DBOS Cloud! 90 |
91 | 92 |
93 | 94 | ```shell 95 | dbos-cloud app deploy 96 | ``` 97 | 98 |
99 |
100 | 101 |
102 | 103 | 104 | 105 | #### 1. Install the DBOS Cloud CLI 106 |
107 | 108 |
109 | Run this command to install the Cloud CLI globally. 110 |
111 | 112 |
113 | ```shell 114 | npm i -g @dbos-inc/dbos-cloud@latest 115 | ``` 116 |
117 |
118 | 119 | #### 2. Define a Start Command 120 |
121 |
122 | 123 | Set the `start` command in the `runtimeConfig` section of your [`dbos-config.yaml`](../../typescript/reference/configuration.md) to your application's launch command. 124 | 125 | If your application includes an HTTP server, configure it to listen on port 3000. 126 | 127 | To test that it works, try launching your application with `npx dbos start`. 128 | 
129 | 130 |
131 | 132 | ```yaml 133 | runtimeConfig: 134 | start: 135 | - "npm start" 136 | ``` 137 | 138 |
139 |
140 | 141 | #### 3. Deploy to DBOS Cloud 142 |
143 |
144 | Run this single command to deploy your application to DBOS Cloud! 145 |
146 | 147 |
148 | 149 | ```shell 150 | dbos-cloud app deploy 151 | ``` 152 | 153 |
154 |
155 | 156 |
157 |
-------------------------------------------------------------------------------- /docs/production/dbos-cloud/monitoring-dashboard.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 70 3 | title: Monitoring Your Applications 4 | --- 5 | 6 | The [DBOS Cloud Console](https://console.dbos.dev) provides several tools to monitor your applications. 7 | 8 | ### Logs 9 | 10 | You can view your application's logs from your application's cloud console page. 11 | Logs are paginated and ordered chronologically. 12 | 13 | ![Logs](./assets/cc-logs.png) 14 | 15 | ### Traces 16 | 17 | You can view traces for all your applications from the cloud console [traces page](https://console.dbos.dev/traces). 18 | You can filter traces by application, time, operation, type, and status. 19 | Traces are sorted chronologically and displayed hierarchically. 20 | You can click on a trace or span to see detailed information about it. 21 | 22 | ![Logs](./assets/cc-traces.png) 23 | 24 | 25 | ### Grafana Dashboard 26 | 27 | You can launch a Grafana dashboard for DBOS Cloud from the cloud console [dashboard page](https://console.dbos.dev/dashboard). 28 | 29 | #### Time Selection 30 | 31 | In the top-right corner, the Grafana dashboard provides a time selector, defaulting to the last hour. You can change this setting to navigate to a different window of time. All of the panels are filtered for the selected time interval. 32 | ![Time picker](./assets/time_picker.png) 33 | 34 | Under the time selector you can find time series for active CPU milliseconds used by your apps and the counts of logs and traces generated by your applications, summarized for every minute. These panes have a matched time axis. You can click and drag across an interesting region in the series to "zoom in". 35 | ![Series](./assets/timeseries.png) 36 | 37 | This will update the time selector and, therefore, all other panels. 
You can then use the time selector to "zoom back out" or use the `<` and `>` buttons to move backwards and forwards in time. 38 | 39 | #### Grafana Logs and Traces 40 | 41 | On the left side, the Grafana dashboard provides a log view with entries generated by your applications arranged chronologically. The pane displays up to 1,000 most recent log records in the selected time period. Log records are color coded by severity level. Special entries for application lifetime events are colored grey and labeled as `[APP REGISTER]`, `[APP DEPLOY]` and so on. These are generated by DBOS Cloud automatically and not shown in the summarized log counts. You can click on a log record to browse additional metadata. In the example below, we see logs for an example app first getting registered, then undergoing schema migration, then getting deployed: 42 | ![Logs](./assets/log.png) 43 | 44 | Under the logs pane, there is an expandable panel of traces. Each row corresponds to a handler, workflow, transaction or step span. Each span is timestamped and decorated with duration in milliseconds, the IDs of the trace and workflow it belongs to, its execution status, and other information. 45 | 46 | #### Filtering 47 | 48 | In the top-right corner of the Grafana dashboard, there are filtering selectors: 49 | ![Filters](./assets/filters.png) 50 | 1. you can select a single `Application Name` to filter for. Refresh the browser to update the list of names for a new app. 51 | 2. you can paste a specific `Trace ID` to only view logs and spans for that Trace. To clear, erase the text and press "return." 52 | 3. similar to Trace ID you can copy-paste a specific `Workflow UUID` to filter by that. It is cleared the same way as Trace ID. 53 | 4. you can set `Trace Operation Type` to filter for "handler", "workflow", "transaction" or "step" spans. 54 | 5. typing text into `Search` filters logs for a particular text string and filters traces for a particular function name. 
55 | 56 | :::tip 57 | When turning on these filters, the time window filter also still applies. You may see more data for your selection if you "zoom out" in time. 58 | ::: 59 | 60 | When using `Workflow UUID` use `_` to match any one character and `%` to match any string (SQL 'like' notation). This is useful for selecting groups of scheduled workflows. For example you can use a string like `sched%T19%` to match any scheduled workflows that ran at 7PM on any of the days in the selected time interval. `Search` also supports this syntax. 61 | 62 | #### Requests and CPU Milliseconds 63 | 64 | The Grafana dashboard tracks the total requests and active CPU milliseconds for all your apps. These totals are updated every time you refresh your dashboard. They are applied against your DBOS Pricing tier's [execution time limit](https://www.dbos.dev/pricing). Please allow up to 20 seconds of delay between an event happening and the dashboard refresh showing it. 65 | 66 | The number of total active CPU milliseconds since the start of the month is at the top in orange. The light orange "selection" number to the right changes with the selected app(s) and time window. You can select a particular app or workload and see how much it contributes to your total. 67 | 68 | ![Execution Seconds](./assets/execution-seconds.png) 69 | 70 | :::tip 71 | It is possible for one or two small API calls to not consume a measurable amount of CPU ms. It is also normal for an idle app to use a negligible amount of CPU ms for periodic health checks and background tasks. For best results, run an example workflow of at least 10 API calls (the more the better). Observe how much CPU ms your example uses and extrapolate to your monthly expected usage. 72 | ::: 73 | 74 | The total number of Requests since the start of the month is in the top left corner in purple. 
You can find the total number of requests for a specific app or time window by setting `Trace Operation Type` filter to `handler` and looking at the count at the bottom of the "Traces collected" plot. 75 | 76 | #### Memory and CPU Metrics 77 | 78 | Micro VM Metrics is an expandable panel under the Traces panel. This shows the number of running micro VMs, their RAM and CPU usage over time. These plots are filtered for time and the selected app. If you're running multiple VMs, the CPU % and RAM usage plots show a separate colored line for each VM. 79 | 80 | #### Dashboards and Organizations 81 | 82 | If you are part of a multi-user organization, your Grafana dashboard will show data for all applications deployed by all users in the organization. The log entries for application lifetime events (labeled as `[APP REGISTER]`, `[APP DEPLOY]` and so on) are annotated with the email address of the user performing each action. 83 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/otel-integration.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 75 3 | title: Export Logs and Traces 4 | --- 5 | 6 | This tutorial shows how to configure your DBOS Cloud application to export OpenTelemetry logs and traces to a third party observability service. If your service accepts the OTEL format, you can skip steps 1 and 2. Simply pass environment variables like `OTEL_EXPORTER_OTLP_HEADERS` as [app secrets](./secrets.md) (see [step 3](#3-set-the-datadog-api-key-to-your-apps-environment)) and then configure logs and traces endpoints as shown in [step 4](#4-configure-your-app-to-export-logs-and-traces-to-otel-contrib). 7 | 8 | Other services may require additional software. Here we use Datadog as an example. We connect by installing the otel-contrib package in the App VM at deployment time and configuring it with the Datadog API key to export data. 
9 | 10 | :::info 11 | These steps require a [DBOS Pro or Enterprise](https://www.dbos.dev/pricing) subscription. 12 | ::: 13 | 14 | 15 | ## 1. Create a Custom VM Setup Script 16 | 17 | In your app directory (next to `dbos-config.yaml`) create the following script called `build.sh`. Make sure to set its permissions to execute. 18 | 19 | ```bash 20 | #!/bin/bash 21 | 22 | # Download and install otel-contrib in the MicroVM 23 | curl -L -O https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v0.121.0/otelcol-contrib_0.121.0_linux_amd64.deb 24 | dpkg -i otelcol-contrib_0.121.0_linux_amd64.deb 25 | rm otelcol-contrib_0.121.0_linux_amd64.deb 26 | 27 | # Configure and enable it 28 | cat < /etc/otelcol-contrib/config.yaml 29 | receivers: 30 | otlp: 31 | protocols: 32 | grpc: 33 | http: 34 | endpoint: "0.0.0.0:4318" 35 | 36 | processors: 37 | batch: 38 | 39 | exporters: 40 | datadog: 41 | api: 42 | site: datadoghq.com #this URL depends on your datadog region 43 | key: ${DATADOG_API_KEY} #this is passed in a secret or env (see below) 44 | service: 45 | pipelines: 46 | metrics: 47 | receivers: [otlp] 48 | processors: [batch] 49 | exporters: [datadog] 50 | traces: 51 | receivers: [otlp] 52 | processors: [batch] 53 | exporters: [datadog] 54 | logs: 55 | receivers: [otlp] 56 | processors: [batch] 57 | exporters: [datadog] 58 | EOF 59 | 60 | systemctl restart otelcol-contrib 61 | systemctl enable otelcol-contrib 62 | ``` 63 | 64 | ## 2. Configure Your App to Run the Script on Deploy 65 | 66 | Add the build.sh script as a custom setup to your runtimeConfig in your `dbos-config.yaml`. See [Customizing MicroVM Setup](./application-management#customizing-microvm-setup) for more info. 67 | ```yaml 68 | runtimeConfig: 69 | setup: 70 | - "./build.sh" 71 | start: 72 | - npm run start #or your custom start command 73 | ``` 74 | 75 | ## 3. 
Set the Datadog API Key to Your App's Environment 76 | 77 | After registering your app, set the API key like so: 78 | 79 | ```bash 80 | dbos-cloud app register -d 81 | dbos-cloud app secrets create -s DATADOG_API_KEY -v 678... #your key value 82 | ``` 83 | The script we created in step 1 will read this value and pass it to `otel-contrib`. 84 | 85 | ## 4. Configure your App to Export Logs and Traces to otel-contrib 86 | 87 | In the app code, when creating the `DBOS` object, pass in the Logs and Traces endpoints like so: 88 | 89 | 90 | 91 | ```python 92 | from dbos import DBOSConfig 93 | config: DBOSConfig = { 94 | "name": "your-app-name", 95 | "otlp_traces_endpoints": [ "http://0.0.0.0:4318/v1/traces" ], #match the config in step 1 above 96 | "otlp_logs_endpoints": [ "http://0.0.0.0:4318/v1/logs" ] 97 | } 98 | DBOS(fastapi=app, config=config) 99 | ``` 100 | 101 | 102 | ```typescript 103 | DBOS.setConfig({ 104 | "name": "your-app-name", 105 | "otlpTracesEndpoints": [ "http://0.0.0.0:4318/v1/traces" ], 106 | "otlpLogsEndpoints": [ "http://0.0.0.0:4318/v1/logs" ] 107 | }); 108 | await DBOS.launch({ expressApp: app }); 109 | ``` 110 | 111 | 112 | 113 | ## 5. Add RAM if Needed, and Deploy! 114 | 115 | Depending on your app’s other memory usage, you may need to increase your RAM limit to make room for the otel-contrib process. 116 | 117 | ```bash 118 | dbos-cloud app update --executors-memory-mib 1024 119 | dbos-cloud app deploy 120 | ``` 121 | 122 | Within a few minutes of deploying you should see your logs appear in Datadog. 123 | 124 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/secrets.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 100 3 | title: Secrets and Environment Variables 4 | --- 5 | 6 | We recommend using _secrets_ to securely manage your application's secrets and environment variables in DBOS Cloud. 
7 | Secrets are key-value pairs that are securely stored in DBOS Cloud and made available to your application as environment variables. 8 | Redeploy your application for newly created or updated secrets to take effect. 9 | 10 | ## Managing and Using Secrets 11 | 12 | You can create or update a secret using the Cloud CLI: 13 | 14 | ``` 15 | dbos-cloud app env create -s -v 16 | ``` 17 | 18 | :::info 19 | A few secret names are reserved and cannot be used. These are `DBOS_DATABASE_URL` and `DBOS_APP_HOSTNAME`. 20 | ::: 21 | 22 | For example, to create a secret named `API_KEY` with value `abc123`, run: 23 | 24 | ``` 25 | dbos-cloud app env create -s API_KEY -v abc123 26 | ``` 27 | 28 | When you next redeploy your application, its environment will be updated to contain the `API_KEY` environment variable with value `abc123`. 29 | You can access it like any other environment variable: 30 | 31 | 32 | 33 | 34 | ```python 35 | key = os.environ['API_KEY'] # Value is abc123 36 | ``` 37 | 38 | 39 | 40 | 41 | ```typescript 42 | const key = process.env.API_KEY; // Value is abc123 43 | ``` 44 | 45 | 46 | 47 | Additionally, you can manage your application's secrets from the secrets page of the [cloud console](https://console.dbos.dev). 48 | 49 | Secrets Page 50 | 51 | ## Importing Secrets 52 | 53 | You can import the contents of a `.env` file as secrets. 54 | Allowed syntax for the `.env` file is described [here](https://dotenvx.com/docs/env-file). Note that interpolation is supported but command substitution and encryption are currently not. 
55 | Import a `.env` file with the following command: 56 | 57 | ```shell 58 | dbos-cloud app env import -d 59 | ``` 60 | 61 | For example: 62 | 63 | 64 | ```shell 65 | dbos-cloud app env import -d .env 66 | ``` 67 | 68 | ## Listing Secrets 69 | 70 | You can list the names of your application's secrets with: 71 | 72 | ```shell 73 | dbos-cloud app env list 74 | ``` 75 | 76 | ## Deleting a Secret 77 | 78 | You can delete an environment variable with: 79 | 80 | ```shell 81 | dbos-cloud app env delete -s 82 | ``` 83 | 84 | -------------------------------------------------------------------------------- /docs/production/dbos-cloud/workflow-management.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 21 3 | title: Workflow Management 4 | --- 5 | 6 | ## Viewing Workflows 7 | 8 | Navigate to the workflows tab of your application's page on the DBOS Console to see a list of its workflows: 9 | 10 | Workflow List 11 | 12 | This includes **all** your application's workflows: those currently executing, those enqueued for execution, those that have completed successfully, and those that have failed. 13 | You can filter by time, workflow ID, workflow name, and workflow status (for example, you can search for all failed workflow executions in the past day). 14 | 15 | Click on a workflow to see details, including its input and output: 16 | 17 | Workflow List 18 | 19 | Click "Show Workflow Steps" to view the workflow's execution graph (including the workflow, its steps, and its child workflows and their steps). 20 | For example, here is the graph of a workflow that processes multiple tasks concurrently by enqueueing child workflows: 21 | 22 | Workflow List 23 | 24 | ## Viewing Queues 25 | 26 | Navigate to the queues tab of your application's page to see all **currently enqueued** workflows. 
27 | This page only shows workflows that are currently executing on a queue (`PENDING` status) or are enqueued for execution (`ENQUEUED` status). 28 | By default, the oldest (first enqueued) workflows are shown first. 29 | You can click on workflows to expand them and see their steps, just as in the workflows page. 30 | 31 | Workflow List 32 | 33 | ## Workflow Management 34 | 35 | You can manage individual workflows directly from the DBOS console. 36 | 37 | #### Cancelling Workflows 38 | 39 | You can cancel any `PENDING` or `ENQUEUED` workflow. 40 | Cancelling a workflow sets its status to `CANCELLED`. 41 | If the workflow is currently executing, cancelling it preempts its execution (interrupting it at the beginning of its next step). 42 | If the workflow is enqueued, cancelling removes it from the queue. 43 | 44 | #### Resuming Workflows 45 | 46 | You can resume any `ENQUEUED`, `CANCELLED` or `RETRIES_EXCEEDED` workflow. 47 | Resuming a workflow resumes its execution from its last completed step. 48 | If the workflow is enqueued, this bypasses the queue to start it immediately. 49 | 50 | #### Forking Workflows 51 | 52 | You can start a new execution of a workflow by **forking** it from a specific step. 53 | To do this, open the workflow steps view, select a particular step, and click "Fork". 54 | 55 | When you fork a workflow, DBOS generates a new workflow with a new workflow ID, copies to that workflow the original workflow's inputs and all its steps up to the selected step, then begins executing the new workflow from the selected step. 56 | 57 | Forking a workflow is useful for recovering from outages in downstream services (by forking from the step that failed after the outage is resolved) or for "patching" workflows that failed due to a bug in a previous application version (by forking from the bugged step to an application version on which the bug is fixed).
-------------------------------------------------------------------------------- /docs/production/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 10 3 | title: Deploying To Production 4 | --- 5 | 6 | Once you've made your application durable with DBOS, there are two ways to take it into production: 7 | 8 | ## Self-Hosting 9 | 10 | DBOS is a library. 11 | You can import it in existing applications and keep deploying them on your infrastructure using your existing build, test, and deploy tools. 12 | 13 | To simplify managing and recovering your durable workflows in production, we recommend connecting your production applications to DBOS Conductor. 14 | Conductor is a managed service that helps you operate DBOS applications. 15 | It provides: 16 | 17 | - [**Distributed workflow recovery**](./self-hosting/workflow-recovery.md): In a distributed environment with many executors running durable workflows, Conductor automatically detects when the execution of a durable workflow is interrupted (for example, if its executor is restarted, interrupted, or crashes) and recovers the workflow to another healthy executor. 18 | - [**Workflow and queue observability**](./self-hosting/workflow-management.md): Conductor provides dashboards of all active and past workflows and all queued tasks, including their status, inputs, outputs, and steps. 19 | - [**Workflow and queue management**](./self-hosting/workflow-management.md): From the Conductor dashboard, cancel, resume, or restart any workflow execution and manage the tasks in your distributed queues. 20 | 21 | Conductor is not part of your application's critical path, so it does not add runtime overhead. 22 | If your connection to Conductor is interrupted, your applications will continue operating normally. 23 | Recovery, observability, and workflow management will automatically resume once connectivity is restored. 
24 | 25 | ## DBOS Cloud 26 | 27 | Any application built with DBOS can be deployed to DBOS Cloud. 28 | DBOS Cloud is a serverless platform for durably executed applications. 29 | It provides: 30 | 31 | - [**Application hosting and autoscaling**](./dbos-cloud/application-management.md): Managed hosting of your application in the cloud, automatically scaling to millions of users. Applications are charged only for the CPU time they actually consume. 32 | - [**Managed workflow recovery**](./dbos-cloud/application-management.md): If a cloud executor is interrupted, crashed, or restarted, each of its workflows is automatically recovered by another executor. 33 | - [**Workflow and queue observability**](./dbos-cloud/workflow-management.md): Dashboards of all active and past workflows and all queued tasks, including their status, inputs, outputs, and steps. 34 | - [**Workflow and queue management**](./dbos-cloud/workflow-management.md): From an online dashboard, cancel, resume, or restart any workflow execution and manage the tasks in your distributed queues. -------------------------------------------------------------------------------- /docs/production/self-hosting/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Self-Hosting", 3 | "position": 20 4 | } 5 | -------------------------------------------------------------------------------- /docs/production/self-hosting/admin-api.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 100 3 | title: Admin API Reference 4 | --- 5 | 6 | The DBOS library exposes an admin API to perform operations on durable workflows. 7 | By default, this API is on port 3001, though this is configurable. 8 | 9 | ### Health Check 10 | 11 | - **Endpoint**: `/dbos-healthz` 12 | - **HTTP Method**: GET 13 | - **Description**: Performs a health check on the application. 
14 | - **Response**: 15 | - **Status Code**: 200 OK if the system is healthy; otherwise, appropriate error codes. 16 | 17 | ### Workflow Recovery 18 | 19 | - **Endpoint**: `/dbos-workflow-recovery` 20 | - **Method**: POST 21 | - **Description**: Recover all pending workflows associated with input executor IDs. Returns the IDs of all workflows queued for recovery. 22 | - **Request Body Format**: JSON list of executors whose pending workflows to recover. 23 | - **Example**: 24 | ```json 25 | ["executor-id-1", "executor-id-2", "..."] 26 | ``` 27 | - **Response**: 28 | - **Status Code**: 200 OK on successful recovery initiation; otherwise, appropriate error codes. 29 | - **Body Format**: JSON list of the IDs of workflows queued for recovery. 30 | - **Example**: 31 | ```json 32 | ["workflow-uuid-1", "workflow-uuid-2", "..."] 33 | ``` 34 | 35 | ### Deactivate 36 | 37 | - **Endpoint**: `/deactivate` 38 | - **Method**: GET 39 | - **Description**: Deactivate an executor. A deactivated executor may complete active workflows and recover `PENDING` workflows, but may not start new workflows or dequeue workflows. 40 | - **Response**: 41 | - **Status Code**: 200 OK if the request succeeded; otherwise, appropriate error codes. 42 | -------------------------------------------------------------------------------- /docs/production/self-hosting/conductor.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 10 3 | title: DBOS Conductor 4 | --- 5 | 6 | The simplest way to operate DBOS durable workflows in production is to connect your application to DBOS Conductor. 7 | Conductor is a managed service that helps you operate DBOS applications.
8 | It provides: 9 | 10 | - [**Distributed workflow recovery**](./workflow-recovery.md): In a distributed environment with many executors running durable workflows, Conductor automatically detects when the execution of a durable workflow is interrupted (for example, if its executor is restarted, interrupted, or crashes) and recovers the workflow to another healthy executor. 11 | - [**Workflow and queue observability**](./workflow-management.md): Conductor provides dashboards of all active and past workflows and all queued tasks, including their status, inputs, outputs, and steps. 12 | - [**Workflow and queue management**](./workflow-management.md): From the Conductor dashboard, cancel, resume, or restart any workflow execution and manage the tasks in your distributed queues. 13 | 14 | 15 | ## Connecting To Conductor 16 | 17 | :::tip 18 | Conductor is not part of your application's critical path. 19 | If your connection to Conductor is interrupted, your applications will continue operating normally. 20 | Recovery, observability, and workflow management will automatically resume once connectivity is restored. 21 | ::: 22 | 23 | To connect your application to Conductor, first register your application on the [DBOS console](https://console.dbos.dev). 24 | **The name you register must match the name you give your application in its configuration.** 25 | 26 | Workflow List 27 | 28 | Next, generate an API key. 29 | By default, API keys do not expire, though they may be revoked at any time from the [key settings page](https://staging.console.dbos.dev/settings/apikey): 30 | 31 | Workflow List 32 | 33 | Finally, supply that API key to your DBOS application to connect it to Conductor. 
34 | This initiates a websocket connection with Conductor: 35 | 36 | 37 | 38 | ```python 39 | conductor_key=os.environ.get("DBOS_CONDUCTOR_KEY", None) 40 | DBOS(conductor_key=conductor_key) 41 | ``` 42 | 43 | 44 | 45 | ```javascript 46 | const conductorKey = process.env.DBOS_CONDUCTOR_KEY 47 | await DBOS.launch({conductorKey}) 48 | ``` 49 | 50 | 51 | 52 | ## Managing Conductor Applications 53 | 54 | You can view all applications registered with Conductor on the DBOS Console: 55 | 56 | Workflow List 57 | 58 | On your application's page, you can see all executors (processes) running that application that are currently connected to Conductor. 59 | Executors are identified by a unique ID that they generate and print on startup. 60 | When you restart an executor, it generates a new ID. 61 | 62 | Workflow List 63 | 64 | Conductor uses a WebSocket-based protocol to exchange workflow metadata and commands with your application. An application is shown as _available_ in Conductor when at least one of its processes is connected. Conductor has no access to your application's database or other private data. As a result, workflow-related features are only available while your application is connected to Conductor over this metadata-only connection. 65 | -------------------------------------------------------------------------------- /docs/production/self-hosting/hosting-with-docker.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 69 3 | title: Deploying With Docker 4 | --- 5 | 6 | 7 | # Deploying with Docker 8 | 9 | This guide shows you how to setup a starter DBOS Python application and its Postgres database using Docker. 10 | 11 | ## Dockerfile 12 | 13 | First, we'll setup a Dockerfile to configure a Debian Python image. 
14 | 15 | There's nothing unique about this Dockerfile—DBOS is just a library for your program to import, so it can run in any Docker container that has the appropriate language runtime (Node or Python) installed. 16 | The container initializes a DBOS starter template then starts the application. 17 | 18 | ```bash 19 | FROM python:3.11-slim 20 | 21 | WORKDIR /app 22 | 23 | RUN python3 -m venv .venv && \ 24 | . .venv/bin/activate && \ 25 | pip install --upgrade pip && \ 26 | pip install dbos && \ 27 | dbos init --template dbos-app-starter 28 | 29 | EXPOSE 8000 30 | 31 | CMD ["/bin/bash", "-c", ". .venv/bin/activate && dbos start"] 32 | ``` 33 | 34 | ## Docker compose 35 | 36 | Next, we'll use Docker compose to start two containers: one for Postgres and one for the DBOS Python app. 37 | Note we set `restart` to `unless-stopped` so the container automatically restarts if the application crashes. 38 | 39 | ```yaml 40 | version: '3.9' 41 | 42 | services: 43 | db: 44 | image: pgvector/pgvector:pg16 45 | environment: 46 | POSTGRES_USER: postgres 47 | POSTGRES_PASSWORD: dbos 48 | ports: 49 | - "5432:5432" 50 | volumes: 51 | - pgdata:/var/lib/postgresql/data 52 | 53 | app: 54 | build: 55 | context: . 56 | dockerfile: Dockerfile 57 | ports: 58 | - "8000:8000" 59 | depends_on: 60 | - db 61 | environment: 62 | DBOS_DATABASE_URL: postgres://postgres:dbos@db:5432/dbos_app_starter 63 | restart: unless-stopped 64 | 65 | volumes: 66 | pgdata: 67 | ``` 68 | 69 | ## Access the application 70 | 71 | Bring the containers up with `docker-compose up --build`. 72 | 73 | You can now visit `http://localhost:8000/` to see the template application live. 74 | If you press "crash the application", Docker will restart the container immediately and the DBOS workflow will resume durably. 
-------------------------------------------------------------------------------- /docs/production/self-hosting/hosting-with-kubernetes.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 70 3 | title: Deploying With Kubernetes 4 | --- 5 | 6 | 7 | # Deploying with Kubernetes 8 | 9 | 10 | This guide shows you how to set up a DBOS Python application and its Postgres database using Kubernetes. 11 | 12 | It assumes you have an existing Kubernetes service up and running. 13 | 14 | You'll need two manifests: one for Postgres and one for the application. 15 | 16 | ## Building the Application Image 17 | 18 | 19 | DBOS is just a library for your program to import, so it can run with any Python/Node program. For a reference Dockerfile to build a container and upload it to your registry, see our [Docker guide](./hosting-with-docker.md). Deploy both services with `kubectl apply -f [manifest.yaml]` 20 | 21 | 22 | ## Application service 23 | 24 | Replace `image URI` with the address of your container.
25 | 26 | ```yaml 27 | apiVersion: apps/v1 28 | kind: Deployment 29 | metadata: 30 | name: dbos-app 31 | spec: 32 | replicas: 1 33 | selector: 34 | matchLabels: 35 | app: dbos-app 36 | template: 37 | metadata: 38 | labels: 39 | app: dbos-app 40 | spec: 41 | containers: 42 | - name: dbos-app 43 | image: 44 | env: 45 | - name: DBOS_DATABASE_URL 46 | value: postgres://postgres:dbos@postgres:5432/dbos_app_starter 47 | ports: 48 | - containerPort: 8000 49 | --- 50 | apiVersion: v1 51 | kind: Service 52 | metadata: 53 | name: dbos-app 54 | spec: 55 | type: LoadBalancer 56 | selector: 57 | app: dbos-app 58 | ports: 59 | - port: 8000 60 | targetPort: 8000 61 | ``` 62 | 63 | 64 | ## Postgres Service 65 | 66 | 67 | ```yaml 68 | apiVersion: apps/v1 69 | kind: Deployment 70 | metadata: 71 | name: postgres 72 | spec: 73 | replicas: 1 74 | selector: 75 | matchLabels: 76 | app: postgres 77 | template: 78 | metadata: 79 | labels: 80 | app: postgres 81 | spec: 82 | containers: 83 | - name: postgres 84 | image: pgvector/pgvector:pg16 85 | env: 86 | - name: POSTGRES_USER 87 | value: "postgres" 88 | - name: POSTGRES_PASSWORD 89 | value: "dbos" 90 | ports: 91 | - containerPort: 5432 92 | volumeMounts: 93 | - mountPath: /var/lib/postgresql/data 94 | name: postgres-storage 95 | volumes: 96 | - name: postgres-storage 97 | emptyDir: {} 98 | --- 99 | apiVersion: v1 100 | kind: Service 101 | metadata: 102 | name: postgres 103 | spec: 104 | selector: 105 | app: postgres 106 | ports: 107 | - port: 5432 108 | targetPort: 5432 109 | ``` 110 | 111 | 112 | ## Visit the application 113 | 114 | Check the services are running with `kubectl` or your favorite k8s admin tool. 
115 | 116 | ```shell 117 | kubectl get pods 118 | NAME READY STATUS RESTARTS AGE 119 | dbos-app-6d968b9dc6-lsk6w 1/1 Running 0 105m 120 | postgres-9f65bff75-ztm7w 1/1 Running 0 107m 121 | ``` 122 | 123 | Find the public IP of the application with `kubectl`: 124 | ```shell 125 | kubectl get svc dbos-app 126 | NAME TYPE CLUSTER-IP EXTERNAL-IP PORT(S) AGE 127 | dbos-app LoadBalancer x.x.x.x x.x.x.x 8000:30176/TCP 106m 128 | ``` 129 | 130 | You can now visit `http://[EXTERNAL-IP]:8000/` to see the template application live. 131 | If you press "crash the application", Kubernetes will restart the container immediately and the DBOS workflow will resume durably. -------------------------------------------------------------------------------- /docs/production/self-hosting/workflow-management.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 30 3 | title: Workflow Management 4 | --- 5 | 6 | :::info 7 | Workflow observability and management features are only available for applications connected to [Conductor](./conductor.md). 8 | ::: 9 | 10 | ## Viewing Workflows 11 | 12 | Navigate to the workflows tab of your application's page on the DBOS Console to see a list of its workflows: 13 | 14 | Workflow List 15 | 16 | This includes **all** your application's workflows: those currently executing, those enqueued for execution, those that have completed successfully, and those that have failed. 17 | You can filter by time, workflow ID, workflow name, and workflow status (for example, you can search for all failed workflow executions in the past day). 18 | 19 | Click on a workflow to see details, including its input and output: 20 | 21 | Workflow List 22 | 23 | Click "Show Workflow Steps" to view the workflow's execution graph (including the workflow, its steps, and its child workflows and their steps). 
24 | For example, here is the graph of a workflow that processes multiple tasks concurrently by enqueueing child workflows: 25 | 26 | Workflow List 27 | 28 | ## Viewing Queues 29 | 30 | Navigate to the queues tab of your application's page to see all **currently enqueued** workflows. 31 | This page only shows workflows that are currently executing on a queue (`PENDING` status) or are enqueued for execution (`ENQUEUED` status). 32 | By default, the oldest (first enqueued) workflows are shown first. 33 | You can click on workflows to expand them and see their steps, just as in the workflows page. 34 | 35 | Workflow List 36 | 37 | ## Workflow Management 38 | 39 | You can manage individual workflows directly from the DBOS console. 40 | 41 | #### Cancelling Workflows 42 | 43 | You can cancel any `PENDING` or `ENQUEUED` workflow. 44 | Cancelling a workflow sets its status to `CANCELLED`. 45 | If the workflow is currently executing, cancelling it preempts its execution (interrupting it at the beginning of its next step). 46 | If the workflow is enqueued, cancelling removes it from the queue. 47 | 48 | #### Resuming Workflows 49 | 50 | You can resume any `ENQUEUED`, `CANCELLED` or `RETRIES_EXCEEDED` workflow. 51 | Resuming a workflow resumes its execution from its last completed step. 52 | If the workflow is enqueued, this bypasses the queue to start it immediately. 53 | 54 | #### Forking Workflows 55 | 56 | You can start a new execution of a workflow by **forking** it from a specific step. 57 | To do this, open the workflow steps view, select a particular step, and click "Fork". 58 | 59 | When you fork a workflow, DBOS generates a new workflow with a new workflow ID, copies to that workflow the original workflow's inputs and all its steps up to the selected step, then begins executing the new workflow from the selected step.
60 | 61 | Forking a workflow is useful for recovering from outages in downstream services (by forking from the step that failed after the outage is resolved) or for "patching" workflows that failed due to a bug in a previous application version (by forking from the bugged step to an application version on which the bug is fixed). -------------------------------------------------------------------------------- /docs/production/self-hosting/workflow-recovery.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 50 3 | title: Workflow Recovery 4 | --- 5 | 6 | When the execution of a durable workflow is interrupted (for example, if its executor is restarted, interrupted, or crashes), another executor must recover the workflow and resume its execution. 7 | To prevent duplicate work, it is important to detect interruptions promptly and to recover each workflow only once. 8 | This guide describes how to manage workflow recovery in a production environment. 9 | 10 | ## Managing Recovery 11 | 12 | ### Recovery On A Single Server 13 | 14 | If hosting an application on a single server without Conductor, each time you restart your application's process, DBOS recovers all workflows that were executing before the restart (all `PENDING` workflows). 15 | 16 | ### Recovery in a Distributed Setting 17 | 18 | When self-hosting in a distributed setting without Conductor, it is important to manage workflow recovery so that when an executor crashes, restarts, or is shut down, its workflows are recovered. 19 | You should assign each executor running a DBOS application an executor ID by setting the `DBOS__VMID` environment variable. 20 | Each workflow is tagged with the ID of the executor that started it. 21 | When an application with an executor ID restarts, it only recovers pending workflows assigned to that executor ID.
22 | You can also instruct your executor to recover workflows assigned to other executor IDs through the [workflow recovery endpoint of the admin API](./admin-api.md#workflow-recovery). 23 | 24 | ### Recovery With Conductor 25 | 26 | If your application is connected to [DBOS Conductor](./conductor.md), workflow recovery is automatic. 27 | When Conductor detects that an executor is unhealthy, it automatically signals another executor to recover its workflows. 28 | 29 | When an executor disconnects from Conductor, its status is changed to `DISCONNECTED` while Conductor waits for it to reconnect. 30 | If it has not reconnected after a grace period, its status is changed to `DEAD` and Conductor signals another executor of a compatible application version to recover its workflows. 31 | After recovery is confirmed, the executor is deleted. 32 | 33 | ## Managing Application versions 34 | 35 | When self-hosting, it is important to be careful when upgrading your application's code. 36 | When DBOS is launched, it computes an "application version" from a checksum of the code in your application's workflows (you can override this version through the `DBOS__APPVERSION` environment variable). 37 | Each workflow is tagged with the version of the application that started it. 38 | To prevent code compatibility issues, DBOS does not attempt to recover workflows tagged with a different application version. 39 | 40 | To safely recover workflows started on an older version of your code, you should start a process running that code version. 41 | If self-hosting using Conductor, that process will automatically recover all pending workflows of that code version. 42 | If self-hosting without Conductor, you should use the [workflow recovery endpoint of the admin API](./admin-api.md#workflow-recovery) to instruct that process to recover workflows belonging to executors that ran old code versions. 
-------------------------------------------------------------------------------- /docs/python/examples/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Example Applications", 3 | "position": 50 4 | } 5 | -------------------------------------------------------------------------------- /docs/python/examples/assets/cron-starter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/examples/assets/cron-starter.png -------------------------------------------------------------------------------- /docs/python/examples/assets/document_detective.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/examples/assets/document_detective.png -------------------------------------------------------------------------------- /docs/python/examples/assets/langgraph-agent-architect.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/examples/assets/langgraph-agent-architect.png -------------------------------------------------------------------------------- /docs/python/examples/assets/langgraph-agent-workflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/examples/assets/langgraph-agent-workflow.png -------------------------------------------------------------------------------- /docs/python/examples/assets/widget_store_ui.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/examples/assets/widget_store_ui.png -------------------------------------------------------------------------------- /docs/python/examples/cron-starter.md: -------------------------------------------------------------------------------- 1 | --- 2 | displayed_sidebar: examplesSidebar 3 | sidebar_position: 10 4 | title: Cloud Cron Quickstart 5 | hide_table_of_contents: true 6 | --- 7 | import InstallNode from '/docs/partials/_install_node.mdx'; 8 | 9 | Let's say you want to run some code **on a schedule**. For example, you want to: 10 | 11 | - Record a stock's price once a minute. 12 | - Migrate some data from one database to another once an hour. 13 | - Send emails to inactive users once a week. 14 | 15 | This kind of code isn't easy to manage because the server running it has to always be "on"—you can't just run it on your laptop. 16 | 17 | In this tutorial, we'll show you how to use DBOS to **run code on a schedule in the cloud** so you don't have to worry about maintaining it. 18 | You'll learn how to write a scheduled (cron) function in **just 6 lines of Python code** and deploy it to the cloud with **a single click**. 19 | 20 | ### Tutorial 21 | 22 | #### 1. Select the Cloud Cron Starter 23 | Visit [https://console.dbos.dev/launch](https://console.dbos.dev/launch) and select the DBOS Cron Starter. 24 | When prompted, create a database for your app with default settings. 25 | 26 | Cloud Console Templates 27 | 28 | #### 2. Connect to GitHub and Deploy to DBOS Cloud 29 | 30 | To ensure you can easily update your project after deploying it, DBOS will create a GitHub repository for you. 31 | You can deploy directly from that GitHub repository to DBOS Cloud. 32 | 33 | First, sign in to your GitHub account. 34 | Then, set your repository name and whether it should be public or private. 
35 | 36 | Next, click "Create GitHub Repo and Deploy" and DBOS will clone a copy of the source code into your GitHub account, then deploy your project to DBOS Cloud. 37 | In less than a minute, your app should deploy successfully. 38 | 39 | Deploy with GitHub 40 | 41 | #### 3. View Your Application 42 | 43 | At this point, your app is running code on a schedule in the cloud! 44 | Visit its URL to see it—it should look like this: 45 | 46 | ![Cloud Cron Starter](./assets/cron-starter.png) 47 | 48 | To see your new app's code, visit your new GitHub repository and open `app/main.py`. 49 | The app schedules a function incrementing a counter to run once a minute (the cron syntax `* * * * *` means "once a minute"). 50 | The app page displays the current value of the counter. 51 | 52 | 53 | 54 | #### 4. Start Building 55 | 56 | To start building, edit your application on GitHub (source code is in `app/main.py`), commit your changes, then press "Deploy From GitHub" on your [applications page](https://console.dbos.dev/applications) to see your changes reflected in the live application. 57 | 58 | Deploy with GitHub 59 | 60 | 61 | ### Next Steps 62 | 63 | You can adapt this 6-line starter to implement your own scheduled job. 64 | Replace `scheduled_function` with your own function to run it on a schedule! 65 | Some useful implementation notes: 66 | 67 | - Schedules are specified in crontab syntax. 68 | For example, `* * * * *` means "run once a minute." 69 | To learn more about crontab syntax, see [this guide](https://docs.gitlab.com/ee/topics/cron/). 70 | - The two arguments passed into `scheduled_function` are the time the run was scheduled (as a `datetime`) and the time the run was actually started (as a `datetime`). 71 | - For more information, see the [scheduling documentation](../tutorials/scheduled-workflows.md). 
72 | 73 | Here are two larger examples built with DBOS scheduling: 74 | 75 | - [**Hacker News Slackbot**](../examples/hacker-news-bot.md): Periodically search Hacker News for people commenting about serverless computing and post the comments to Slack. 76 | - [**Earthquake Tracker**](../examples/earthquake-tracker.md): Use a scheduled job to scrape earthquake data from the USGS, then build a real-time earthquake dashboard over it. 77 | 78 | ### Running It Locally 79 | 80 | You can also run your application locally for development and testing. 81 | 82 | #### 1. Git Clone Your Application 83 |
84 |
85 | Clone your application from git and enter its directory. 86 |
87 | 88 |
89 | 90 | ```shell 91 | git clone 92 | cd dbos-cron-starter 93 | ``` 94 | 95 |
96 |
97 | 98 | #### 2. Set up a virtual environment 99 |
100 |
101 | 102 | Create a virtual environment and install dependencies. 103 | 104 |
105 | 106 |
107 | 108 | 109 | 110 | ```shell 111 | python3 -m venv .venv 112 | source .venv/bin/activate 113 | pip install -r requirements.txt 114 | ``` 115 | 116 | 117 | ```shell 118 | python3 -m venv .venv 119 | .venv\Scripts\activate.ps1 120 | pip install -r requirements.txt 121 | ``` 122 | 123 | 124 | ```shell 125 | python3 -m venv .venv 126 | .venv\Scripts\activate.bat 127 | pip install -r requirements.txt 128 | ``` 129 | 130 | 131 | 132 |
133 |
134 | 135 | #### 3. Start Your Appliation 136 |
137 |
138 | 139 | Start your application with `dbos start`, then visit [`http://localhost:8000`](http://localhost:8000) to see it! 140 | 141 |
142 | 143 |
144 | ```shell 145 | dbos start 146 | ``` 147 |
148 | 149 |
150 | 151 | -------------------------------------------------------------------------------- /docs/python/examples/hacker-news-bot.md: -------------------------------------------------------------------------------- 1 | --- 2 | displayed_sidebar: examplesSidebar 3 | sidebar_position: 5 4 | title: Hacker News Slackbot 5 | --- 6 | 7 | In this example, we use DBOS to build and deploy a scheduled job that periodically searches Hacker News for people commenting about serverless computing and posts the comments to Slack. 8 | 9 | All source code is [available on GitHub](https://github.com/dbos-inc/dbos-demo-apps/tree/main/python/hackernews-alerts). 10 | 11 | ## Import and Initialize the App 12 | 13 | Let's start off with imports and initializing the DBOS app. 14 | 15 | ```python 16 | import html 17 | import os 18 | import re 19 | import threading 20 | from datetime import UTC, datetime, timedelta 21 | 22 | import requests 23 | import slack_sdk 24 | from dbos import DBOS, DBOSConfig 25 | 26 | config: DBOSConfig = { 27 | "name": "hackernews-alerts", 28 | } 29 | DBOS(config=config) 30 | ``` 31 | 32 | ## Searching Hacker News 33 | 34 | Next, let's write a function that searches Hacker News. 35 | This function uses Algolia's Hacker News Search API to find all comments in the last _N_ hours containing a search term. 36 | It returns matching comments and links to them. 37 | We annotate this function with `@DBOS.step` so later we can durably call it from our scheduled workflow. 
38 | 39 | ```python 40 | @DBOS.step() 41 | def search_hackernews(query: str, window_size_hours: int): 42 | threshold = datetime.now(UTC) - timedelta(hours=window_size_hours) 43 | 44 | params = { 45 | "tags": "comment", 46 | "query": query, 47 | "numericFilters": f"created_at_i>{threshold.timestamp()}", 48 | } 49 | 50 | response = requests.get("http://hn.algolia.com/api/v1/search", params).json() 51 | 52 | hits = [] 53 | for hit in response["hits"]: 54 | # Reformat the comment by unescaping HTML, adding newlines, and removing HTML tags 55 | comment = hit["comment_text"] 56 | comment = re.sub("
<p>
", "\n", html.unescape(comment)) 57 | comment = re.sub("<[^<]+?>", "", comment) 58 | url = f"https://news.ycombinator.com/item?id={hit['objectID']}" 59 | hits.append((comment, url)) 60 | return hits 61 | ``` 62 | 63 | ## Posting to Slack 64 | 65 | Next, let's write a function that posts a Hacker News comment and its URL to Slack. 66 | This function requires a Slack bot token supplied through an environment variable. 67 | We'll explain later how to generate one. 68 | Again, we annotate this function with `@DBOS.step` so later we can durably call it from our scheduled workflow. 69 | 70 | ```python 71 | @DBOS.step() 72 | def post_to_slack(comment: str, url: str): 73 | message = f"{comment}\n\n{url}" 74 | client = slack_sdk.WebClient(token=os.environ["SLACK_HN_BOT_OAUTH_TOKEN"]) 75 | client.chat_postMessage( 76 | channel="hacker-news-alerts", 77 | text=message, 78 | unfurl_links=False, 79 | unfurl_media=False, 80 | ) 81 | ``` 82 | 83 | ## Scheduling the Search 84 | 85 | Next, let's write a scheduled job that runs the search every hour and posts its findings to Slack. 86 | The [`@DBOS.scheduled`](../tutorials/scheduled-workflows.md) decorator tells DBOS to run this function on a schedule defined in [crontab syntax](https://en.wikipedia.org/wiki/Cron), in this case once per hour. 87 | The [`@DBOS.workflow`](../tutorials/workflow-tutorial.md) decorator tells DBOS to durably execute this function, so it runs exactly-once per hour and you'll never miss a Hacker News comment or record a duplicate. 
88 | 89 | ```python 90 | @DBOS.scheduled("0 * * * *") 91 | @DBOS.workflow() 92 | def run_hourly(scheduled_time: datetime, actual_time: datetime): 93 | results = search_hackernews("serverless", window_size_hours=1) 94 | for comment, url in results: 95 | post_to_slack(comment, url) 96 | DBOS.logger.info(f"Found {len(results)} comments at {str(actual_time)}") 97 | ``` 98 | 99 | Finally, in our main function, let's launch DBOS, then sleep the main thread forever while the scheduled job runs in the background: 100 | 101 | ```python 102 | if __name__ == "__main__": 103 | DBOS.launch() 104 | threading.Event().wait() 105 | ``` 106 | 107 | ## Try it Yourself! 108 | 109 | ### Setting Up Slack 110 | 111 | To run this app, you need a Slack bot token for your workspace. 112 | Follow [this tutorial](https://api.slack.com/tutorials/tracks/getting-a-token) to generate one. 113 | Your token should start with "xoxb". 114 | Set it as an environment variable like so: 115 | 116 | ```shell 117 | export SLACK_HN_BOT_OAUTH_TOKEN= 118 | ``` 119 | 120 | You should also create a Slack channel named `hacker-news-alerts` in your workspace for the bot to post to! 121 | 122 | ### Deploying to the Cloud 123 | 124 | To deploy this app as a persistent scheduled job to DBOS Cloud, first install the DBOS Cloud CLI (requires Node): 125 | 126 | ```shell 127 | npm i -g @dbos-inc/dbos-cloud 128 | ``` 129 | 130 | Then clone the [dbos-demo-apps](https://github.com/dbos-inc/dbos-demo-apps) repository and deploy: 131 | 132 | ```shell 133 | git clone https://github.com/dbos-inc/dbos-demo-apps.git 134 | cd python/hackernews-alerts 135 | dbos-cloud app deploy 136 | ``` 137 | You can visit the [DBOS Cloud Console](https://console.dbos.dev/login-redirect) to see your app's status and logs. 
138 | 139 | ### Running Locally 140 | 141 | First, clone and enter the [dbos-demo-apps](https://github.com/dbos-inc/dbos-demo-apps) repository: 142 | 143 | ```shell 144 | git clone https://github.com/dbos-inc/dbos-demo-apps.git 145 | cd python/hackernews-alerts 146 | ``` 147 | 148 | Then create a virtual environment: 149 | 150 | ```shell 151 | python3 -m venv .venv 152 | source .venv/bin/activate 153 | ``` 154 | 155 | Then start your app: 156 | 157 | ```shell 158 | pip install -r requirements.txt 159 | dbos start 160 | ``` -------------------------------------------------------------------------------- /docs/python/integrating-dbos.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 20 3 | title: Add DBOS To Your App 4 | --- 5 | 6 | 7 | This guide shows you how to add the open-source [DBOS Transact](https://github.com/dbos-inc/dbos-transact-py) library to your existing application to **durably execute** it and make it resilient to any failure. 8 | 9 | ### 1. Install DBOS 10 | `pip install` DBOS into your application. 11 | 12 | ```shell 13 | pip install dbos 14 | ``` 15 | 16 | DBOS requires a Postgres database. 17 | If you already have Postgres, you can set the `DBOS_DATABASE_URL` environment variable to your connection string (later we'll pass that value into DBOS). 18 | Otherwise, you can start Postgres in a Docker container with this command: 19 | 20 | ```shell 21 | dbos postgres start 22 | ``` 23 | 24 | ### 2. Add the DBOS Initializer 25 | 26 | Add these lines of code to your program's main function. 27 | They initialize DBOS when your program starts. 28 | 29 | 30 | ```python 31 | import os 32 | from dbos import DBOS, DBOSConfig 33 | 34 | config: DBOSConfig = { 35 | "name": "my-app", 36 | "database_url": os.environ.get("DBOS_DATABASE_URL"), 37 | } 38 | DBOS(config=config) 39 | DBOS.launch() 40 | ``` 41 | 42 | ### 3. Start Your Application 43 | 44 | Try starting your application. 
45 | If everything is set up correctly, your app should run normally, but log `Initializing DBOS` and `DBOS launched!` on startup. 46 | Congratulations! You've integrated DBOS into your application. 47 | 48 | 49 | ### 4. Start Building With DBOS 50 | 51 | At this point, you can add any DBOS decorator or method to your application. 52 | For example, you can annotate one of your functions as a [workflow](./tutorials/workflow-tutorial.md) and the functions it calls as [steps](./tutorials/step-tutorial.md). 53 | DBOS durably executes the workflow so if it is ever interrupted, upon restart it automatically resumes from the last completed step. 54 | 55 | You can add DBOS to your application incrementally—it won't interfere with code that's already there. 56 | It's totally okay for your application to have one DBOS workflow alongside thousands of lines of non-DBOS code. 57 | 58 | To learn more about programming with DBOS, check out [the guide](./programming-guide.md). 59 | 60 | 61 | ```python 62 | @DBOS.step() 63 | def step_one(): 64 | ... 65 | 66 | @DBOS.step() 67 | def step_two(): 68 | ... 69 | 70 | @DBOS.workflow() 71 | def workflow(): 72 | step_one() 73 | step_two() 74 | ``` -------------------------------------------------------------------------------- /docs/python/reference/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Reference", 3 | "position": 40 4 | } 5 | -------------------------------------------------------------------------------- /docs/python/reference/configuration.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 12 3 | title: Configuration 4 | --- 5 | 6 | ## Configuring DBOS 7 | 8 | To configure DBOS, pass a `DBOSConfig` object to its constructor. 
9 | For example: 10 | 11 | ```python 12 | config: DBOSConfig = { 13 | "name": "dbos-example", 14 | "database_url": os.environ["DBOS_DATABASE_URL"], 15 | } 16 | DBOS(config=config) 17 | ``` 18 | 19 | The `DBOSConfig` object has the following fields. 20 | All fields except `name` are optional. 21 | 22 | ```python 23 | class DBOSConfig(TypedDict): 24 | name: str 25 | database_url: Optional[str] 26 | sys_db_name: Optional[str] 27 | sys_db_pool_size: Optional[int] 28 | db_engine_kwargs: Optional[Dict[str, Any]] 29 | log_level: Optional[str] 30 | otlp_traces_endpoints: Optional[List[str]] 31 | otlp_logs_endpoints: Optional[List[str]] 32 | admin_port: Optional[int] 33 | run_admin_server: Optional[bool] 34 | ``` 35 | 36 | - **name**: Your application's name. 37 | - **database_url**: A connection string to a Postgres database. DBOS uses this connection string, unmodified, to create a [SQLAlchemy engine](https://docs.sqlalchemy.org/en/20/core/engines.html). 38 | A valid connection string looks like: 39 | 40 | ``` 41 | postgresql://[username]:[password]@[hostname]:[port]/[database name] 42 | ``` 43 | 44 | :::info 45 | SQLAlchemy requires passwords in connection strings to be escaped if they contain special characters (e.g., with [urllib](https://docs.python.org/3/library/urllib.parse.html#urllib.parse.quote)). 46 | ::: 47 | 48 | If no connection string is provided, DBOS uses this default: 49 | 50 | ```shell 51 | postgresql://postgres:dbos@localhost:5432/application_name?connect_timeout=10 52 | ``` 53 | 54 | - **db_engine_kwargs**: Additional keyword arguments passed to SQLAlchemy’s [`create_engine()`](https://docs.sqlalchemy.org/en/20/core/engines.html#sqlalchemy.create_engine), applied to both the application and [system database](../../explanations/system-tables) engines. 
Defaults to: 55 | ```python 56 | { 57 | "pool_size": 20, 58 | "max_overflow": 0, 59 | "pool_timeout": 30, 60 | } 61 | ``` 62 | 63 | - **sys_db_pool_size**: The size of the connection pool used for the [DBOS system database](../../explanations/system-tables). Defaults to 20. 64 | - **sys_db_name**: Name for the [system database](../../explanations/system-tables) in which DBOS stores internal state. Defaults to `{database name}_dbos_sys`. 65 | - **otlp_traces_endpoints**: DBOS operations [automatically generate OpenTelemetry Traces](../tutorials/logging-and-tracing#tracing). Use this field to declare a list of OTLP-compatible trace receivers. 66 | - **otlp_logs_endpoints**: the DBOS logger can export OTLP-formatted log signals. Use this field to declare a list of OTLP-compatible log receivers. 67 | - **log_level**: Configure the [DBOS logger](../tutorials/logging-and-tracing#logging) severity. Defaults to `INFO`. 68 | - **run_admin_server**: Whether to run an [HTTP admin server](../../production/self-hosting/admin-api.md) for workflow management operations. Defaults to True. 69 | - **admin_port**: The port on which the admin server runs. Defaults to 3001. 70 | 71 | 72 | ## DBOS Configuration File 73 | 74 | 75 | Some tools in the DBOS ecosystem, including [DBOS Cloud](../../production/dbos-cloud/deploying-to-cloud.md) and the [DBOS debugger](../tutorials/debugging.md), are configured by a `dbos-config.yaml` file. 76 | 77 | You can create a `dbos-config.yaml` with default parameters with: 78 | 79 | ```shell 80 | dbos init --config 81 | ``` 82 | 83 | ### Configuration File Fields 84 | 85 | ::::info 86 | You can use environment variables for configuration values through the syntax `field: ${VALUE}`. 87 | :::: 88 | 89 | Each `dbos-config.yaml` file has the following fields and sections: 90 | 91 | - **name**: Your application's name. Must match the name supplied to the DBOS constructor. 92 | - **language**: The application language. 
Must be set to `python` for Python applications. 93 | - **database_url**: A connection string to a Postgres database. This connection string is used only by the [DBOS debugger](../tutorials/debugging.md). It has the same format as the connection string you pass to the DBOS constructor. 94 | - **database**: The [database section](#database-section). 95 | - **runtimeConfig**: The [runtime section](#runtime-section). 96 | 97 | #### Database Section 98 | 99 | - **migrate**: A list of commands to run to apply your application's schema to the database. 100 | 101 | **Example**: 102 | 103 | ```yaml 104 | database: 105 | sys_db_name: 'my_dbos_system_db' 106 | migrate: 107 | - alembic upgrade head 108 | ``` 109 | 110 | #### Runtime Section 111 | 112 | - **start**: The command(s) with which to start your app. Called from [`dbos start`](../reference/cli.md#dbos-start), which is used to start your app in DBOS Cloud. 113 | - **setup**: Setup commands to run before your application is built in DBOS Cloud. Used only in DBOS Cloud. Documentation [here](../../production/dbos-cloud/application-management.md#customizing-microvm-setup). 114 | 115 | **Example**: 116 | 117 | ```yaml 118 | runtimeConfig: 119 | start: 120 | - "fastapi run" 121 | ``` 122 | 123 | ### Configuration Schema File 124 | 125 | There is a schema file available for the DBOS configuration file schema [on GitHub](https://github.com/dbos-inc/dbos-transact-py/blob/main/dbos/dbos-config.schema.json). 126 | This schema file can be used to provide an improved YAML editing experience for developer tools that leverage it. 127 | For example, the Visual Studio Code [RedHat YAML extension](https://marketplace.visualstudio.com/items?itemName=redhat.vscode-yaml) provides tooltips, statement completion and real-time validation for editing DBOS config files. 128 | This extension provides [multiple ways](https://github.com/redhat-developer/vscode-yaml#associating-schemas) to associate a YAML file with its schema. 
129 | The easiest is to simply add a comment with a link to the schema at the top of the config file: 130 | 131 | ```yaml 132 | # yaml-language-server: $schema=https://github.com/dbos-inc/dbos-transact-py/blob/main/dbos/dbos-config.schema.json 133 | ``` 134 | -------------------------------------------------------------------------------- /docs/python/reference/dbos-class.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 1 3 | title: DBOS Class 4 | pagination_prev: null 5 | --- 6 | 7 | The DBOS class is a singleton—you must instantiate it (by calling its constructor) exactly once in a program's lifetime. 8 | Here, we document its constructor and lifecycle methods. 9 | Decorators are documented [here](./decorators.md) and context methods and variables [here](./contexts.md). 10 | 11 | ## class dbos.DBOS 12 | 13 | ```python 14 | DBOS( 15 | *, 16 | config: Optional[DBOSConfig] = None, 17 | fastapi: Optional[FastAPI] = None, 18 | flask: Optional[Flask] = None, 19 | conductor_key: Optional[str] = None, 20 | ) 21 | ``` 22 | 23 | **Parameters:** 24 | - `config`: Configuration parameters for DBOS. See the [configuration docs](./configuration.md). 25 | - `fastapi`: If your application is using FastAPI, the `FastAPI` object. If this is passed in, DBOS automatically calls [`dbos.launch`](#launch) when FastAPI is fully initialized. DBOS also adds to all routes a middleware that enables [tracing](../tutorials/logging-and-tracing.md#tracing) through FastAPI HTTP endpoints. 26 | - `flask`: If your application is using Flask, the `flask` object. If this is passed in, DBOS adds to all routes a middleware that enables [tracing](../tutorials/logging-and-tracing.md#tracing) through Flask HTTP endpoints. 27 | - `conductor_key`: An API key for [DBOS Conductor](../../production/self-hosting/conductor.md). If provided, application is connected to Conductor. 
API keys can be created from the [DBOS console](https://console.dbos.dev). 28 | 29 | 30 | ### launch 31 | 32 | ```python 33 | DBOS.launch() 34 | ``` 35 | 36 | Launch DBOS, initializing database connections and starting scheduled workflows. 37 | Should be called after all decorators run. 38 | **You should not call a DBOS function until after DBOS is launched.** 39 | If a FastAPI app is passed into the `DBOS` constructor, `launch` is called automatically during FastAPI setup. 40 | 41 | **Example:** 42 | ```python 43 | from dbos import DBOS 44 | 45 | # Initialize the DBOS object 46 | DBOS() 47 | 48 | # Define a scheduled workflow 49 | @DBOS.scheduled("* * * * *") 50 | @DBOS.workflow() 51 | def run_every_minute(scheduled_time: datetime, actual_time: datetime): 52 | DBOS.logger.info("This is a scheduled workflow!") 53 | 54 | # After all decorators run, launch DBOS 55 | DBOS.launch() 56 | ``` 57 | 58 | **Example using Flask:** 59 | ```python 60 | from flask import Flask 61 | from dbos import DBOS 62 | 63 | app = Flask(__name__) 64 | DBOS(flask=app) 65 | 66 | @app.route("/") 67 | @DBOS.workflow() 68 | def test_workflow(): 69 | return "
<p>Workflow successful!</p>
" 70 | 71 | # After all decorators run, launch DBOS 72 | DBOS.launch() 73 | 74 | if __name__ == "__main__": 75 | app.run() 76 | ``` 77 | 78 | Assuming your file is `main.py`, run with `python3 -m main` (dev) or `gunicorn -w 1 'main:app' -b 0.0.0.0:8000` (prod) 79 | 80 | ### destroy 81 | 82 | ```python 83 | DBOS.destroy( 84 | destroy_registry: bool = False 85 | ) 86 | ``` 87 | 88 | Destroy the DBOS singleton, terminating all active workflows and closing database connections. 89 | After this completes, the singleton can be re-initialized. 90 | Useful for testing. 91 | 92 | **Parameters:** 93 | - `destroy_registry`: Whether to destroy the global registry of decorated functions. If set to `True`, `destroy` will "un-register" all decorated functions. You probably want to leave this `False`. 94 | 95 | 96 | ### reset_system_database 97 | 98 | ```python 99 | DBOS.reset_system_database() 100 | ``` 101 | 102 | Destroy the DBOS [system database](../../explanations/how-workflows-work.md), resetting DBOS's internal state in Postgres. 103 | Useful when testing a DBOS application to reset the internal state of DBOS between tests. 104 | For example, see its use in the [testing tutorial](../tutorials/testing.md). 105 | **This is a destructive operation and should only be used in a test environment.** 106 | -------------------------------------------------------------------------------- /docs/python/reference/queues.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 4 3 | title: Queues 4 | --- 5 | 6 | Queues allow you to ensure that functions will be run, without starting them immediately. 7 | Queues are useful for controlling the number of functions run in parallel, or the rate at which functions are started. 
8 | 9 | ### class dbos.Queue 10 | 11 | ```python 12 | Queue( 13 | name: str = None, 14 | concurrency: Optional[int] = None, 15 | limiter: Optional[Limiter] = None, 16 | *, 17 | worker_concurrency: Optional[int] = None, 18 | priority_enabled: bool = False, 19 | ) 20 | 21 | class Limiter(TypedDict): 22 | limit: int 23 | period: float # In seconds 24 | ``` 25 | 26 | **Parameters:** 27 | - `name`: The name of the queue. Must be unique among all queues in the application. 28 | - `concurrency`: The maximum number of functions from this queue that may run concurrently. 29 | This concurrency limit is global across all DBOS processes using this queue. 30 | If not provided, any number of functions may run concurrently. 31 | - `limiter`: A limit on the maximum number of functions which may be started in a given period. 32 | - `worker_concurrency`: The maximum number of functions from this queue that may run concurrently on a given DBOS process. Must be less than or equal to `concurrency`. 33 | - `priority_enabled`: Enable setting priority for workflows on this queue. 34 | 35 | **Example syntax:** 36 | 37 | This queue may run no more than 10 functions concurrently and may not start more than 50 functions per 30 seconds: 38 | 39 | ```python 40 | queue = Queue("example_queue", concurrency=10, limiter={"limit": 50, "period": 30}) 41 | ``` 42 | 43 | 44 | ### enqueue 45 | 46 | ```python 47 | queue.enqueue( 48 | func: Callable[P, R], 49 | *args: P.args, 50 | **kwargs: P.kwargs, 51 | ) -> WorkflowHandle[R] 52 | ``` 53 | 54 | Enqueue a function for processing and return a [handle](./workflow_handles.md#workflowhandle) to it. 55 | You can enqueue any DBOS-annotated function. 56 | The `enqueue` method durably enqueues your function; after it returns your function is guaranteed to eventually execute even if your app is interrupted. 
57 | 58 | **Example syntax:** 59 | 60 | ```python 61 | from dbos import DBOS, Queue 62 | 63 | queue = Queue("example_queue") 64 | 65 | @DBOS.step() 66 | def process_task(task): 67 | ... 68 | 69 | @DBOS.workflow() 70 | def process_tasks(tasks): 71 | task_handles = [] 72 | # Enqueue each task so all tasks are processed concurrently. 73 | for task in tasks: 74 | handle = queue.enqueue(process_task, task) 75 | task_handles.append(handle) 76 | # Wait for each task to complete and retrieve its result. 77 | # Return the results of all tasks. 78 | return [handle.get_result() for handle in task_handles] 79 | ``` 80 | 81 | ### enqueue_async 82 | 83 | ```python 84 | queue.enqueue_async( 85 | func: Callable[P, Coroutine[Any, Any, R]], 86 | *args: P.args, 87 | **kwargs: P.kwargs, 88 | ) -> WorkflowHandle[R] 89 | ``` 90 | 91 | Asynchronously enqueue an async function for processing and return an [async handle](./workflow_handles.md#workflowhandleasync) to it. 92 | You can enqueue any DBOS-annotated async function. 93 | The `enqueue_async` method durably enqueues your function; after it returns your function is guaranteed to eventually execute even if your app is interrupted. 94 | 95 | **Example syntax:** 96 | 97 | ```python 98 | from dbos import DBOS, Queue 99 | 100 | queue = Queue("example_queue") 101 | 102 | @DBOS.step() 103 | async def process_task_async(task): 104 | ... 105 | 106 | @DBOS.workflow() 107 | async def process_tasks(tasks): 108 | task_handles = [] 109 | # Enqueue each task so all tasks are processed concurrently. 110 | for task in tasks: 111 | handle = await queue.enqueue_async(process_task_async, task) 112 | task_handles.append(handle) 113 | # Wait for each task to complete and retrieve its result. 114 | # Return the results of all tasks. 
115 | return [await handle.get_result() for handle in task_handles] 116 | ``` 117 | -------------------------------------------------------------------------------- /docs/python/reference/workflow_handles.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 5 3 | title: Workflow Handles 4 | description: API reference for DBOS workflow handles 5 | --- 6 | 7 | A workflow handle represents the state of a particular active or completed workflow execution. 8 | You obtain a workflow handle when using `DBOS.start_workflow` to start a workflow in the background. 9 | If you know a workflow's identity, you can also retrieve its handle using `DBOS.retrieve_workflow`. 10 | 11 | ## WorkflowHandle 12 | 13 | ### Methods 14 | 15 | #### get_workflow_id 16 | 17 | ```python 18 | handle.get_workflow_id() -> str 19 | ``` 20 | 21 | Retrieve the ID of the workflow. 22 | 23 | #### get_result 24 | 25 | ```python 26 | handle.get_result() -> R 27 | ``` 28 | 29 | Wait for the workflow to complete, then return its result. 30 | 31 | #### get_status 32 | 33 | ```python 34 | handle.get_status() -> WorkflowStatus 35 | ``` 36 | 37 | Retrieve the [`WorkflowStatus`](./contexts.md#workflow-status) of a workflow. 38 | 39 | 40 | ## WorkflowHandleAsync 41 | 42 | ### Methods 43 | 44 | #### get_workflow_id 45 | 46 | ```python 47 | handle.get_workflow_id() -> str 48 | ``` 49 | 50 | Retrieve the ID of the workflow. Behaves identically to the [WorkflowHandle](#workflowhandle) version. 51 | 52 | #### get_result 53 | 54 | ```python 55 | handle.get_result() -> Coroutine[Any, Any, R] 56 | ``` 57 | 58 | Asynchronously wait for the workflow to complete, then return its result. Similar to the [WorkflowHandle](#workflowhandle) version, except asynchronous. 
59 | 60 | #### get_status 61 | 62 | ```python 63 | handle.get_status() -> Coroutine[Any, Any, WorkflowStatus] 64 | ``` 65 | 66 | Asynchronously retrieve the [`WorkflowStatus`](./contexts.md#workflow-status) of a workflow. 67 | -------------------------------------------------------------------------------- /docs/python/tutorials/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Tutorials", 3 | "position": 30 4 | } 5 | -------------------------------------------------------------------------------- /docs/python/tutorials/assets/ttdb-debug-breakpoint.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/tutorials/assets/ttdb-debug-breakpoint.png -------------------------------------------------------------------------------- /docs/python/tutorials/assets/ttdb-wfid-picker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/tutorials/assets/ttdb-wfid-picker.png -------------------------------------------------------------------------------- /docs/python/tutorials/assets/ttdbg-cloud-replay.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/tutorials/assets/ttdbg-cloud-replay.png -------------------------------------------------------------------------------- /docs/python/tutorials/assets/ttdbg-local-replay.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/tutorials/assets/ttdbg-local-replay.png 
-------------------------------------------------------------------------------- /docs/python/tutorials/assets/ttdbg-proxy-terminal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/python/tutorials/assets/ttdbg-proxy-terminal.png -------------------------------------------------------------------------------- /docs/python/tutorials/classes.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 15 3 | title: Working With Python Classes 4 | --- 5 | 6 | You can add DBOS workflow, step, and transaction decorators to your Python class instance methods. 7 | To add DBOS decorators to your methods, their class must inherit from `DBOSConfiguredInstance` and must be decorated with `@DBOS.dbos_class`. 8 | For example: 9 | 10 | ```python 11 | @DBOS.dbos_class() 12 | class URLFetcher(DBOSConfiguredInstance): 13 | def __init__(self, url: str): 14 | self.url = url 15 | super().__init__(config_name=url) 16 | 17 | @DBOS.workflow() 18 | def fetch_workflow(self): 19 | return self.fetch_url() 20 | 21 | @DBOS.step() 22 | def fetch_url(self): 23 | return requests.get(self.url).text 24 | 25 | example_fetcher = URLFetcher("https://example.com") 26 | print(example_fetcher.fetch_workflow()) 27 | ``` 28 | 29 | When you create a new instance of a DBOS-decorated class, `DBOSConfiguredInstance` must be instantiated with a `config_name`. 30 | This `config_name` should be a unique identifier of the instance. 31 | Additionally, all DBOS-decorated classes must be instantiated before `DBOS.launch()` is called. 32 | 33 | The reason for these requirements is to enable workflow recovery. 34 | When you create a new instance of a DBOS-decorated class, DBOS stores it in a global registry indexed by `config_name`. 
35 | When DBOS needs to recover a workflow belonging to that class, it looks up the class instance using `config_name` so it can run the workflow using the right instance of its class. 36 | If `config_name` is not supplied, or if DBOS-decorated classes are dynamically instantiated after `DBOS.launch()`, then DBOS may not find the class instance it needs to recover a workflow. 37 | 38 | ### Static Methods and Class Methods 39 | 40 | You can add DBOS workflow, step, and transaction decorators to static methods and class methods of any class, even if it does not inherit from `DBOSConfiguredInstance`, because such methods do not access class instance variables. 41 | You must still decorate the class with `@DBOS.dbos_class()`. 42 | For example: 43 | 44 | ```python 45 | @DBOS.dbos_class() 46 | class ExampleClass(): 47 | @staticmethod 48 | @DBOS.workflow() 49 | def staticmethod_workflow(): 50 | return 51 | 52 | @classmethod 53 | @DBOS.workflow() 54 | def classmethod_workflow(cls): 55 | return 56 | ``` -------------------------------------------------------------------------------- /docs/python/tutorials/debugging.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 18 3 | title: Debugging 4 | description: Learn how to debug your DBOS Python workflows. 5 | --- 6 | 7 | DBOS applications automatically save their state to Postgres every time a workflow step is executed. 8 | While this is primarily done for [reliability and fault-tolerance](../../why-dbos.md), the saved state can also be used for debugging purposes. 9 | The DBOS Debugger enables you to replay the execution of your application workflows, step through the recorded states and identify the root cause of bugs more efficiently. 10 | 11 | ## Preliminaries 12 | 13 | The DBOS Debugger is an extension to [Visual Studio Code](https://code.visualstudio.com/) (aka VS Code), a free cross-platform interactive development environment. 
14 | If you don't already have VS Code installed, please see [their official documentation](https://code.visualstudio.com/docs/setup/setup-overview) to get started. 15 | 16 | The DBOS Debugger can be installed from the [VS Code Marketplace website](https://marketplace.visualstudio.com/items?itemName=dbos-inc.dbos-ttdbg) 17 | or or by searching the [Extension Marketplace](https://code.visualstudio.com/docs/editor/extension-marketplace) inside VS Code for "DBOS". 18 | 19 | ![Installing the DBOS Debugger Extension Screenshot](../../assets/ttdbg-ext-install.png) 20 | 21 | Additionally, the official [VS Code Python extension](https://marketplace.visualstudio.com/items?itemName=ms-python.python) is needed to debug DBOS Python applications. 22 | For more information about this extension, please see the [official VS Code documentation](https://code.visualstudio.com/docs/python/python-quick-start). 23 | 24 | ## Local Replay Debugging 25 | 26 | Once the DBOS Debugger extension is installed, VS Code will display a [CodeLens](https://code.visualstudio.com/blogs/2017/02/12/code-lens-roundup) 27 | labeled `🔁 Replay Debug` on every [workflow function](./workflow-tutorial.md) in your application. 28 | 29 | ![Local Replay Debugging Code Lens](./assets/ttdbg-local-replay.png) 30 | 31 | The debugger will pick the database connection information from the `dbos-config.yaml` file. 32 | 33 | If you click on the Replay Debug CodeLens, a list of recent [workflow IDs](./workflow-tutorial#workflow-ids-and-idempotency) will be shown. 34 | You can filter the list of workflow IDs by entering a value in the text box above the list. 35 | If the workflow you wish to debug does not appear in the list, select the pencil icon in the upper right hand corner of the picker window to manually enter a workflow ID. 
executing
## Log Into DBOS Cloud
8 | 9 | First, install [Confluent Kafka](https://docs.confluent.io/kafka-clients/python/current/overview.html) in your application: 10 | 11 | ``` 12 | pip install confluent-kafka 13 | ``` 14 | 15 | Then, define your transaction or workflow. It must take in a Kafka message as an input parameter: 16 | 17 | ```python 18 | from dbos import DBOS, KafkaMessage 19 | 20 | @DBOS.workflow() 21 | def test_kafka_workflow(msg: KafkaMessage): 22 | DBOS.logger.info(f"Message received: {msg.value.decode()}") 23 | ``` 24 | 25 | Then, annotate your function with a [`@DBOS.kafka_consumer`](../reference/decorators#kafka_consumer) decorator specifying which brokers to connect to and which topics to consume from. 26 | Configuration setting details are available from the 27 | [Confluent Kafka API docs](https://docs.confluent.io/platform/current/clients/confluent-kafka-python/html/index.html#pythonclient-configuration) and the 28 | [official Kafka documentation](https://kafka.apache.org/documentation/#consumerconfigs). 29 | At a minimum, you must specify [`bootstrap.servers`](https://kafka.apache.org/documentation/#consumerconfigs_bootstrap.servers) and 30 | [`group.id`](https://kafka.apache.org/documentation/#consumerconfigs_group.id) configuration settings. 31 | 32 | 33 | ```python 34 | from dbos import DBOS, KafkaMessage 35 | 36 | @DBOS.kafka_consumer( 37 | config={ 38 | "bootstrap.servers": "localhost:9092", 39 | "group.id": "dbos-kafka-group", 40 | }, 41 | topics=["example-topic"], 42 | ) 43 | @DBOS.workflow() 44 | def test_kafka_workflow(msg: KafkaMessage): 45 | DBOS.logger.info(f"Message received: {msg.value.decode()}") 46 | 47 | ``` 48 | 49 | Under the hood, DBOS constructs an [idempotency key](../tutorials/workflow-tutorial.md#workflow-ids-and-idempotency) for each Kafka message from its topic, partition, and offset and passes it into your workflow or transaction. 50 | This combination is guaranteed to be unique for each Kafka cluster. 
"name": "my-app",
"otlp_logs_endpoints": ["http://localhost:4318/v1/logs"]
which supports second-level repetition; by default, the first field is interpreted as seconds
8 | 9 | You can turn **any** Python function into a step by annotating it with the [`@DBOS.step`](../reference/decorators.md#step) decorator. 10 | The only requirement is that its inputs and outputs should be serializable ([pickle](https://docs.python.org/3/library/pickle.html)-able). 11 | Here's a simple example: 12 | 13 | ```python 14 | @DBOS.step() 15 | def example_step(): 16 | return requests.get("https://example.com").text 17 | ``` 18 | 19 | You should make a function a step if you're using it in a DBOS workflow and it performs a [**nondeterministic**](../tutorials/workflow-tutorial.md#determinism) operation. 20 | A nondeterministic operation is one that may return different outputs given the same inputs. 21 | Common nondeterministic operations include: 22 | 23 | - Accessing an external API or service, like serving a file from [AWS S3](https://aws.amazon.com/s3/), calling an external API like [Stripe](https://stripe.com/), or accessing an external data store like [Elasticsearch](https://www.elastic.co/elasticsearch/). 24 | - Accessing files on disk. 25 | - Generating a random number. 26 | - Getting the current time. 27 | 28 | You **cannot** call, start, or enqueue workflows from within steps. 29 | You also cannot call DBOS methods like `DBOS.send` or `DBOS.set_event` from within steps. 30 | These operations should be performed from workflow functions. 31 | You can call one step from another step, but the called step becomes part of the calling step's execution rather than functioning as a separate step. 32 | 33 | ### Configurable Retries 34 | 35 | You can optionally configure a step to automatically retry any exception a set number of times with exponential backoff. 36 | This is useful for automatically handling transient failures, like making requests to unreliable APIs. 
Like synchronous step functions, async steps support
client that DBOS automatically connects to your database
12 | Here are some examples: 13 | 14 | 15 | 16 | 17 | ```python 18 | greetings = Table( 19 | "greetings", 20 | MetaData(), 21 | Column("name", String), 22 | Column("note", String) 23 | ) 24 | 25 | @DBOS.transaction() 26 | def example_insert(name: str, note: str) -> None: 27 | # Insert a new greeting into the database 28 | DBOS.sql_session.execute(greetings.insert().values(name=name, note=note)) 29 | 30 | @DBOS.transaction() 31 | def example_select(name: str) -> Optional[str]: 32 | # Select the first greeting to a particular name 33 | row = DBOS.sql_session.execute( 34 | select(greetings.c.note).where(greetings.c.name == name) 35 | ).first() 36 | return row[0] if row else None 37 | ``` 38 | 39 | 40 | 41 | 42 | ```python 43 | @DBOS.transaction() 44 | def example_insert(name: str, note: str) -> None: 45 | # Insert a new greeting into the database 46 | sql = text("INSERT INTO greetings (name, note) VALUES (:name, :note)") 47 | DBOS.sql_session.execute(sql, {"name": name, "note": note}) 48 | 49 | 50 | @DBOS.transaction() 51 | def example_select(name: str) -> Optional[str]: 52 | # Select the first greeting to a particular name 53 | sql = text("SELECT note FROM greetings WHERE name = :name LIMIT 1") 54 | row = DBOS.sql_session.execute(sql, {"name": name}).first() 55 | return row[0] if row else None 56 | ``` 57 | 58 | 59 | 60 | 61 | :::warning 62 | 63 | At this time, DBOS does not support coroutine transactions. 64 | Decorating an `async def` function with `@DBOS.transaction` will raise an error at runtime. 65 | 66 | ::: 67 | 68 | ## Schema Management 69 | 70 | We strongly recommend you manage your database schema using migrations. 71 | One popular database migration tool in Python is [Alembic](https://alembic.sqlalchemy.org/en/latest/). 
alembic revision -m "<migration name>"
13 | 14 | Workflow List 15 | 16 | ## Listing Workflow Steps 17 | 18 | You can list the steps of a workflow programmatically via [`DBOS.list_workflow_steps`](../reference/contexts.md#list_workflow_steps) or from the command line with [`dbos workflow steps`](../reference/cli.md#dbos-workflow-steps). 19 | 20 | You can also visualize a workflow's execution graph (including the workflow, its steps, and its child workflows and their steps) from its page on the DBOS Console (either [self-hosted](../../production/self-hosting/workflow-management.md) or on [DBOS Cloud](../../production/dbos-cloud/workflow-management.md)). 21 | For example, here is the graph of a workflow that processes multiple tasks concurrently by enqueueing child workflows: 22 | 23 | Workflow List 24 | 25 | ## Listing Enqueued Workflows 26 | 27 | You can list all **currently enqueued** workflows and steps of your application via [`DBOS.list_queued_workflows`](../reference/contexts.md#list_queued_workflows) or from the command line with [`dbos workflow queue list`](../reference/cli.md#dbos-workflow-queue-list). 28 | 29 | You can also view a searchable and expandable list of your application's currently enqueued workflows and steps from its page on the DBOS Console (either [self-hosted](../../production/self-hosting/workflow-management.md) or on [DBOS Cloud](../../production/dbos-cloud/workflow-management.md)). 30 | 31 | Workflow List 32 | 33 | ## Cancelling Workflows 34 | 35 | You can cancel the execution of a workflow from the web UI, programmatically via [`DBOS.cancel_workflow`](../reference/contexts.md#cancel_workflow), or through the command line with [`dbos workflow cancel`](../reference/cli.md#dbos-workflow-cancel). 36 | 37 | If the workflow is currently executing, cancelling it preempts its execution (interrupting it at the beginning of its next step). 38 | If the workflow is enqueued, cancelling removes it from the queue. 
application
55 | You can also fork a workflow from a step from the web UI by clicking on that step in the workflow's graph visualization: 56 | 57 | Workflow List -------------------------------------------------------------------------------- /docs/typescript/examples/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Example Applications", 3 | "position": 50 4 | } 5 | -------------------------------------------------------------------------------- /docs/typescript/examples/assets/alert_center_ui.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/examples/assets/alert_center_ui.png -------------------------------------------------------------------------------- /docs/typescript/examples/assets/dbos-task-scheduler-main.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/examples/assets/dbos-task-scheduler-main.png -------------------------------------------------------------------------------- /docs/typescript/examples/assets/shop-guide-diagram-source.txt: -------------------------------------------------------------------------------- 1 | https://docs.google.com/drawings/d/1d7gxWWC1WAdk3UIq24_cgDVXalkpMNF-jZfNscUljc0/edit 2 | -------------------------------------------------------------------------------- /docs/typescript/integrating-dbos.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 20 3 | title: Add DBOS To Your App 4 | --- 5 | 6 | This guide shows you how to add the open-source [DBOS Transact](https://github.com/dbos-inc/dbos-transact-ts) library to your existing application to **durably execute** it and make it resilient to any failure. 
7 | 8 | :::info 9 | Also check out the integration guides for popular TypeScript frameworks: 10 | - [Next.js + DBOS](../integrations/adding-dbos-to-next.md) 11 | - [Nest.js + DBOS](../integrations/nestjs.md) 12 | ::: 13 | 14 | ### Using DBOS Transact 15 | 16 | #### 1. Install DBOS 17 | 18 | `npm install` DBOS into your application. Note that DBOS requires Node.js 20 or later. 19 | 20 | ```shell 21 | npm install @dbos-inc/dbos-sdk@latest 22 | ``` 23 | 24 | Then, enable TypeScript decorators in your `tsconfig.json` file: 25 | 26 | ```json title="tsconfig.json" 27 | "compilerOptions": { 28 | "experimentalDecorators": true, 29 | } 30 | ``` 31 | 32 | DBOS requires a Postgres database. 33 | If you already have Postgres, you can set the `DBOS_DATABASE_URL` environment variable to your connection string (later we'll pass that value into DBOS). 34 | Otherwise, you can start Postgres in a Docker container with this command: 35 | 36 | ```shell 37 | npx dbos postgres start 38 | ``` 39 | 40 | 41 | #### 2. Initialize DBOS in Your App 42 | 43 | In your app's main entrypoint, add the following code. 44 | This initializes DBOS when your app starts. 45 | 46 | ```javascript 47 | import { DBOS } from "@dbos-inc/dbos-sdk"; 48 | 49 | DBOS.setConfig({ 50 | "name": "my-app", 51 | "databaseUrl": process.env.DBOS_DATABASE_URL 52 | }); 53 | await DBOS.launch(); 54 | ``` 55 | 56 | #### 3. Start Your Application 57 | 58 | Try starting your application. 59 | If everything is set up correctly, your app should run normally, but log `DBOS launched!` on startup. 60 | Congratulations! You've integrated DBOS into your application. 61 | 62 | #### 4. Start Building With DBOS 63 | 64 | At this point, you can add any DBOS decorator or method to your application. 65 | For example, you can annotate one of your functions as a [workflow](./tutorials/workflow-tutorial.md) and the functions it calls as [steps](./tutorials/step-tutorial.md). 
static async myStep(n: number) {
all of the stored procedures generated by the compiler would be prefixed with
38 | You must deploy your stored procedures to the database before running your DBOS application. 39 | 40 | **Arguments:** 41 | - `tsconfigPath`: path to the DBOS application's [tsconfig.json file](https://www.typescriptlang.org/docs/handbook/tsconfig-json.html). 42 | If this argument is not provided, `dbosc` will use the tsconfig.json file from the appDirectory (if specified) or the current directory. 43 | 44 | **Arguments:** 45 | - `-d, --appDir `: The path to your application root directory. 46 | - `--app-version ` / `--no-app-version`: Overrides the `DBOS__APPVERSION` environment variable. 47 | For more details, see [Stored Procedure Versioning](#stored-procedure-versioning) above. 48 | 49 | --- 50 | 51 | ### `npx dbosc drop` 52 | This command drops the DBOS application's stored procedures from the PostgreSQL database specified in the [configuration file](../configuration). 53 | 54 | **Arguments:** 55 | - `tsconfigPath`: path to the DBOS application's [tsconfig.json file](https://www.typescriptlang.org/docs/handbook/tsconfig-json.html). 56 | If this argument is not provided, `dbosc` will use the tsconfig.json file from the appDirectory (if specified) or the current directory. 57 | 58 | **Arguments:** 59 | - `-d, --appDir `: The path to your application root directory. 60 | - `--app-version ` / `--no-app-version`: Overrides the `DBOS__APPVERSION` environment variable. 61 | For more details, see [Stored Procedure Versioning](#stored-procedure-versioning) above. 62 | 63 | --- 64 | 65 | ### `npx dbosc compile` 66 | This command generates `create.sql` and `drop.sql` files containing the SQL commands to deploy or drop the DBOS application stored procedures. 67 | This command can be useful to integrate DBOS into an environment where you can't deploy stored procedures to the database automatically with `dbosc deploy`. 68 | 69 | :::warning 70 | This command will overwrite existing `create.sql` and `drop.sql` files in the output directory. 
71 | ::: 72 | 73 | **Arguments:** 74 | - `tsconfigPath`: path to the DBOS application's [tsconfig.json file](https://www.typescriptlang.org/docs/handbook/tsconfig-json.html). 75 | If this argument is not provided, `dbosc` will use the tsconfig.json file from the current directory. 76 | 77 | **Options:** 78 | - `-o, --out <directory>`: The path of the directory where the compiler will generate the `create.sql` and `drop.sql` files. Defaults to current directory if not specified. 79 | - `--app-version <version>` / `--no-app-version`: Overrides the `DBOS__APPVERSION` environment variable. 80 | For more details, see [Stored Procedure Versioning](#stored-procedure-versioning) above. 81 | -------------------------------------------------------------------------------- /docs/typescript/reference/transactapi/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "DBOS Transact API Reference", 3 | "position": 10 4 | } 5 | -------------------------------------------------------------------------------- /docs/typescript/reference/transactapi/workflow-handles.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 20 3 | title: Workflow Handles 4 | description: API reference for workflow handles 5 | --- 6 | 7 | A workflow handle represents the state of a particular active or completed workflow execution. 8 | A workflow handle is obtained when a workflow is started with [`DBOS.startWorkflow`](./dbos-class#starting-background-workflows). 9 | Additionally, a handle can be retrieved by calling [`DBOS.retrieveWorkflow`](./dbos-class#dbosretrieveworkflow) with the workflow's [unique ID](../../tutorials/workflow-tutorial#workflow-ids-and-idempotency). 
10 | 11 | --- 12 | 13 | ### Methods 14 | 15 | #### `getStatus(): Promise<WorkflowStatus>` 16 | 17 | Retrieves the status of a workflow with the following structure: 18 | 19 | ```typescript 20 | export interface WorkflowStatus { 21 | readonly status: string; // The status of the workflow. One of PENDING, SUCCESS, ERROR, RETRIES_EXCEEDED, or CANCELLED. 22 | readonly workflowName: string; // The name of the workflow function. 23 | readonly authenticatedUser: string; // The user who ran the workflow. Empty string if not set. 24 | readonly assumedRole: string; // The role used to run this workflow. Empty string if authorization is not required. 25 | readonly authenticatedRoles: string[]; // All roles the authenticated user has, if any. 26 | readonly request: HTTPRequest; // The parent request for this workflow, if any. 27 | } 28 | ``` 29 | 30 | #### `getResult(): Promise<R>` 31 | 32 | Waits for the workflow to complete then returns its output. 33 | 34 | #### `workflowID: string` 35 | 36 | Retrieves the workflow's [unique ID](../../tutorials/workflow-tutorial#workflow-ids-and-idempotency). 37 | 38 | #### `getWorkflowInputs(): Promise<T>` 39 | 40 | Retrieves the workflow's input argument array. -------------------------------------------------------------------------------- /docs/typescript/reference/transactapi/workflow-queues.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 30 3 | title: Workflow Queues 4 | --- 5 | 6 | Workflow queues allow you to ensure that workflow functions will be run, without starting them immediately. 7 | Queues are useful for controlling the number of workflows run in parallel, or the rate at which they are started. 8 | 9 | Queues should be created when your application module is loaded. This ensures that the queue information is available for the DBOS runtime when it is initialized, and begins handling the queued workflow records. 
10 | 11 | ### class WorkflowQueue 12 | 13 | ```typescript 14 | interface QueueRateLimit { 15 | limitPerPeriod: number; 16 | periodSec: number; 17 | } 18 | 19 | interface QueueParameters { 20 | workerConcurrency?: number; 21 | concurrency?: number; 22 | rateLimit?: QueueRateLimit; 23 | } 24 | 25 | class WorkflowQueue { 26 | constructor(name: string, queueParameters: QueueParameters); 27 | } 28 | ``` 29 | 30 | **Parameters:** 31 | - `name`: The name of the queue. Must be unique among all queues in the application. 32 | - `concurrency`: The maximum number of workflows from this queue that may run concurrently. Defaults to no limit. 33 | This concurrency limit is global across all DBOS processes using this queue. 34 | - `workerConcurrency`: The maximum number of workflows from this queue that may run concurrently within a single DBOS process. Must be less than or equal to `concurrency`. 35 | - `rateLimit`: A limit on the maximum number of functions which may be started in a given period. 36 | - `rateLimit.limitPerPeriod`: The number of workflows that may be started within the specified time period. 37 | - `rateLimit.periodSec`: The time period across which `limitPerPeriod` applies. 38 | 39 | **Example syntax:** 40 | 41 | This queue may run no more than 10 functions concurrently and may not start more than 50 functions per 30 seconds: 42 | 43 | ```typescript 44 | const queue = new WorkflowQueue( 45 | "example_queue", 46 | { 47 | concurrency: 10, 48 | workerConcurrency: 5, 49 | rateLimit: { limitPerPeriod: 50, periodSec: 30 } 50 | }, 51 | ); 52 | ``` 53 | 54 | 55 | ### Enqueueing Workflows 56 | 57 | Workflows are enqueued with the [`DBOS.startWorkflow`](./dbos-class#starting-background-workflows) function, by providing a `queueName` argument. 58 | 59 | This enqueues a function for processing and returns a [handle](./workflow-handles.md) to it. 
60 | 61 | The `DBOS.startWorkflow` method durably enqueues your function; after it returns, your function is guaranteed to eventually execute even if your app is interrupted. 62 | 63 | **Example syntax:** 64 | 65 | ```javascript 66 | import { DBOS, WorkflowQueue } from "@dbos-inc/dbos-sdk"; 67 | 68 | const queue = new WorkflowQueue("example_queue"); 69 | 70 | class Tasks { 71 | @DBOS.workflow() 72 | static async processTask(task) { 73 | // ... 74 | } 75 | 76 | @DBOS.workflow() 77 | static async processTasks(tasks) { 78 | const handles = [] 79 | 80 | // Enqueue each task so all tasks are processed concurrently. 81 | for (const task of tasks) { 82 | handles.push(await DBOS.startWorkflow(Tasks, {queueName: queue.name}).processTask(task)); 83 | } 84 | 85 | // Wait for each task to complete and retrieve its result. 86 | // Return the results of all tasks. 87 | const results = []; 88 | for (const h of handles) { 89 | results.push(await h.getResult()); 90 | } 91 | return results; 92 | } 93 | } 94 | ``` 95 | -------------------------------------------------------------------------------- /docs/typescript/tutorials/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Tutorials", 3 | "position": 30 4 | } 5 | -------------------------------------------------------------------------------- /docs/typescript/tutorials/assets/console-debug-picker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/tutorials/assets/console-debug-picker.png -------------------------------------------------------------------------------- /docs/typescript/tutorials/assets/ttdb-debug-breakpoint.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/tutorials/assets/ttdb-debug-breakpoint.png -------------------------------------------------------------------------------- /docs/typescript/tutorials/assets/ttdb-wfid-picker.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/tutorials/assets/ttdb-wfid-picker.png -------------------------------------------------------------------------------- /docs/typescript/tutorials/assets/ttdbg-cloud-replay.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/tutorials/assets/ttdbg-cloud-replay.png -------------------------------------------------------------------------------- /docs/typescript/tutorials/assets/ttdbg-local-replay.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/tutorials/assets/ttdbg-local-replay.png -------------------------------------------------------------------------------- /docs/typescript/tutorials/assets/ttdbg-time-travel.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/tutorials/assets/ttdbg-time-travel.png -------------------------------------------------------------------------------- /docs/typescript/tutorials/assets/ttdbg-wfid-picker-with-console.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/dbos-inc/dbos-docs/29bb2b40bf75ebbe83a05b4bf1d11f665e2c8980/docs/typescript/tutorials/assets/ttdbg-wfid-picker-with-console.png -------------------------------------------------------------------------------- /docs/typescript/tutorials/authentication-authorization.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 60 3 | title: Authentication & Authorization 4 | --- 5 | 6 | This section covers declarative authentication and authorization in DBOS. 7 | 8 | DBOS supports modular, built-in declarative security: you can use the [`@Authentication`](./requestsandevents/http-serving-tutorial#authentication) class decorator to make user identities available to DBOS contexts. Further, you can associate operations with a list of permitted roles using the [`@DBOS.requiredRole`](../reference/transactapi/dbos-class.md#dbosrequiredrole) API. 9 | 10 | :::info note 11 | You can fully implement authentication and authorization using custom [HTTP middleware](./requestsandevents/http-serving-tutorial#middleware) which will run before the request reaches the handler. This section describes mechanisms DBOS provides to make it easier. 12 | ::: 13 | 14 | ## Authentication Middleware 15 | To instruct DBOS to perform authentication for an HTTP endpoint, you can use the [`@Authentication`](./requestsandevents/http-serving-tutorial#authentication) class decorator to register HTTP middleware with your custom authentication logic (for example validating a [JSON Web Token](https://jwt.io/) and retrieving user credentials and permissions from the decoded token). 
16 | The decorator should return a structure containing identity and claimed roles: 17 | 18 | ```javascript 19 | return { 20 | authenticatedUser: "Mary", 21 | authenticatedRoles: ["user", "admin"], 22 | }; 23 | ``` 24 | 25 | When serving a request from an HTTP endpoint, DBOS runs the authentication middleware before running the requested operation and makes this information available in the [context](../reference/transactapi/dbos-class#accessing-http-context). 26 | 27 | ## Authorization Decorators 28 | To declare a list of roles that are authorized to run the methods in a class, use the [`@DBOS.defaultRequiredRole`](../reference/transactapi/dbos-class.md#dbosdefaultrequiredrole) class decorator: 29 | 30 | ```javascript 31 | @DBOS.defaultRequiredRole(['user']) 32 | class Operations 33 | { 34 | // Most operations will be user-level 35 | } 36 | ``` 37 | 38 | At runtime, before running an operation, DBOS verifies that the operation context contains an authenticated role listed in its required roles. 39 | For exceptions, requiring more or less privilege than the default, you can specify [`@DBOS.requiredRole`](../reference/transactapi/dbos-class#dbosrequiredrole) at the method level 40 | 41 | ```javascript 42 | @DBOS.defaultRequiredRole(['user']) 43 | class Operations 44 | { 45 | // Most operations will be user-level 46 | 47 | // Registering a new user doesn't require privilege 48 | @DBOS.requiredRole([]) 49 | static async doRegister(firstName: string, lastName: string){} 50 | 51 | // Deleting a user requires escalated privilege 52 | @DBOS.requiredRole(['admin']) 53 | static async deleteOtherUser(otherUser: string){} 54 | } 55 | ``` 56 | 57 | ## Example 58 | In this example, we demonstrate how to use DBOS declarative security: 59 | 60 | ```javascript 61 | // Resolve request identity using HTTP headers. 62 | // You can replace this logic with robust methods such as JWT. 
63 | const authenticationMiddleware = (ctx: MiddlewareContext) => { 64 | return { 65 | // Extract username from headers 66 | authenticatedUser: ctx.koaContext?.header.username, 67 | // Attribute role "appUser" to incoming requests 68 | authenticatedRoles: ["appUser"], 69 | }; 70 | }; 71 | 72 | @Authentication(authenticationMiddleware) 73 | @DBOS.defaultRequiredRole("appUser") 74 | export class Hello { 75 | ... 76 | } 77 | ``` 78 | 79 | Here, we instruct the `Hello` class to run `authenticationMiddleware` on all incoming HTTP requests. 80 | We require requests to authenticate with the `appUser` role to reach any HTTP handler declared in `Hello`. 81 | The authentication function simply parses the username from the HTTP headers. 82 | You can replace this with a more robust authentication method, such as [JSON Web Tokens](https://jwt.io/). 83 | 84 | For applications that manage their own users, it is possible to access the database in a read-only way from the `MiddlewareContext` (Knex shown): 85 | 86 | ```typescript 87 | static async authMiddlware(ctx: MiddlewareContext) { 88 | if (!ctx.requiredRole || !ctx.requiredRole.length) { 89 | return; 90 | } 91 | const {user} = ctx.koaContext.query; 92 | if (!user) { 93 | throw new DBOSNotAuthorizedError("User not provided", 401); 94 | } 95 | const u = await ctx.query( 96 | (dbClient: Knex, uname: string) => { 97 | return dbClient(userTableName).select("username").where({ username: uname }) 98 | }, user as string); 99 | 100 | if (!u || !u.length) { 101 | throw new DBOSNotAuthorizedError("User does not exist", 403); 102 | } 103 | 104 | // NOTE: Validate credentials against database 105 | 106 | ctx.logger.info(`Allowed in user: ${u[0].username}`); 107 | return { 108 | authenticatedUser: u[0].username!, 109 | authenticatedRoles: ["user"], 110 | }; 111 | } 112 | 113 | ``` 114 | 115 | -------------------------------------------------------------------------------- /docs/typescript/tutorials/debugging.md: 
-------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 105 3 | title: Debugging 4 | description: Learn how to debug your DBOS TypeScript workflows. 5 | --- 6 | 7 | DBOS applications automatically save their state to Postgres every time a workflow step is executed. 8 | While this is primarily done for [reliability and fault-tolerance](../../why-dbos.md), the saved state can also be used for debugging purposes. 9 | The DBOS Debugger enables you to replay the execution of your application workflows, step through the recorded states and identify the root cause of bugs more efficiently. 10 | 11 | ## Preliminaries 12 | 13 | The DBOS Debugger is an extension to [Visual Studio Code](https://code.visualstudio.com/) (aka VS Code), a free cross-platform interactive development environment. 14 | If you don't already have VS Code installed, please see [their official documentation](https://code.visualstudio.com/docs/setup/setup-overview) to get started. 15 | 16 | The DBOS Debugger can be installed from the [VS Code Marketplace website](https://marketplace.visualstudio.com/items?itemName=dbos-inc.dbos-ttdbg) 17 | or by searching the [Extension Marketplace](https://code.visualstudio.com/docs/editor/extension-marketplace) inside VS Code for "DBOS". 18 | 19 | ![Installing the DBOS Debugger Extension Screenshot](../../assets/ttdbg-ext-install.png) 20 | 21 | ## Local Replay Debugging 22 | 23 | Once the DBOS Debugger extension is installed, VS Code will display a [CodeLens](https://code.visualstudio.com/blogs/2017/02/12/code-lens-roundup) 24 | labeled `🔁 Replay Debug` on every [workflow method](./workflow-tutorial.md) in your application. 25 | 26 | ![Local Replay Debugging Code Lens](./assets/ttdbg-local-replay.png) 27 | 28 | The debugger will pick the database connection information from the `dbos-config.yaml` file. 
29 | 30 | If you click on the Replay Debug CodeLens, a list of recent [workflow IDs](./workflow-tutorial#workflow-ids-and-idempotency) will be shown. 31 | You can filter the list of workflow IDs by entering a value in the text box above the list. 32 | If the workflow you wish to debug does not appear in the list, select the pencil icon in the upper right hand corner of the picker window to manually enter a workflow ID. 33 | 34 | ![Workflow ID picker](./assets/ttdb-wfid-picker.png) 35 | 36 | After selecting a workflow ID, the DBOS debugger will automatically launch your application with the [`npx dbos debug`](../reference/tools/cli.md#npx-dbos-debug) 37 | command with the VS Code TypeScript debugger attached. 38 | From here, you can step through your DBOS workflow and inspect variables as you would when debugging any other TypeScript application. 39 | 40 | :::info Note 41 | You can only step through workflow code when using the Replay Debugger. 42 | [Step](./step-tutorial.md) and [transaction](./transaction-tutorial.md) methods are skipped when Replay Debugging. 43 | The results returned from step and transaction methods when the workflow originally ran are retrieved and returned automatically without executing the function body. 44 | ::: 45 | 46 | ![TypeScript debugger at breakpoint](./assets/ttdb-debug-breakpoint.png) 47 | 48 | ## Cloud Replay Debugging 49 | 50 | You can also replay debug DBOS applications deployed to DBOS Cloud. 51 | If your application is deployed to DBOS Cloud and you are logged into DBOS Cloud in the DBOS Debugger, you will see an additional 52 | `☁️ Cloud Replay Debug` CodeLens attached to your DBOS workflow methods. 53 | 54 | ![Cloud Replay Debugging Code Lens](./assets/ttdbg-cloud-replay.png) 55 | 56 | Cloud replay debugging works the same as local replay debugging. 57 | The only difference is the database your application connects to. 
58 | When cloud replay debugging, the DBOS Debugger retrieves the DBOS Cloud database connection information 59 | and passes it to the `dbos debug` command via environment variables, 60 | overriding the database connection information in the `dbos-config.yaml` file. 61 | 62 | ## DBOS Console Integration 63 | 64 | When using the cloud replay debugger, an additional button appears in the upper right corner of the pick list next to the pencil icon. 65 | This button launches your default browser and navigates to the [DBOS Cloud Workflow Manager page](../../production/dbos-cloud/workflow-management.md). 66 | 67 | ![Workflow Picker with DBOS Console button](./assets/ttdbg-wfid-picker-with-console.png) 68 | 69 | In the DBOS Cloud Workflow Manager, a new control button appears that allows you to select the workflow that you wish to debug. 70 | Selecting the debug button in this view will navigate the user back to VS Code, where the debugger will launch the selected workflow. 71 | 72 | ![DBOS Cloud Workflow Manager with Debug control](./assets/console-debug-picker.png) 73 | 74 | :::info Note 75 | For your safety, VS Code will ask for confirmation before navigating to the Workflow Manager page and when navigating back to VS Code. 76 | If desired, you can configure VS Code not to prompt on these navigation events in the future. 77 | ::: 78 | 79 | ## Log Into DBOS Cloud 80 | 81 | To log into DBOS Cloud in the DBOS Debugger, navigate to the DBOS Cloud view and select the `Log Into DBOS Cloud` menu item. 82 | 83 | ![Log Into DBOS Cloud](../../assets/ttdbg-login-cloud.png) 84 | 85 | Alternatively, you can open the VS Code [Command Palette](https://code.visualstudio.com/docs/getstarted/userinterface#_command-palette) 86 | and run the `DBOS: Log Into DBOS Cloud` command directly. 
87 | 88 | ![Log Into DBOS Cloud Command Palette](../../assets/ttdbg-login-cmd-palette.png) -------------------------------------------------------------------------------- /docs/typescript/tutorials/development/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Software Development With DBOS", 3 | "position": 300 4 | } 5 | -------------------------------------------------------------------------------- /docs/typescript/tutorials/development/using-libraries.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 60 3 | title: Using Libraries 4 | description: Learn how to use DBOS library functions 5 | --- 6 | 7 | In this guide, you'll learn how to use DBOS library functions. Examples will be based on [`@dbos-inc/dbos-email-ses`](https://www.npmjs.com/package/@dbos-inc/dbos-email-ses), a DBOS library for sending emails using [AWS Simple Email Service](https://aws.amazon.com/ses/). 8 | 9 | ### Installing and Importing a Library 10 | 11 | First, install the library. 12 | ```bash 13 | npm install @dbos-inc/dbos-email-ses 14 | ``` 15 | 16 | Second, import the key classes from the library for use in your source files: 17 | ```typescript 18 | import { DBOS_SES } from "@dbos-inc/dbos-email-ses"; 19 | ``` 20 | 21 | ### Calling Simple Functions 22 | Libraries such as `@dbos-inc/dbos-bcrypt` or `@dbos-inc/dbos-datetime` are comprised of functions that can be invoked from their classes. Such functions may be called directly: 23 | ```typescript 24 | BcryptStep.bcryptHash('myString'); 25 | ``` 26 | 27 | ### Working With Instantiated Objects 28 | While libraries such as `@dbos-inc/dbos-bcrypt` or `@dbos-inc/dbos-datetime` have simple functions that can be called directly from their classes, more complex DBOS libraries use ["instantiated objects"](../instantiated-objects) so that they can be used in multiple scenarios within the same application. 
To create and configure an instance: 29 | 30 | ```typescript 31 | import { DBOS } from "@dbos-inc/dbos-sdk"; 32 | import { DBOS_SES } from "@dbos-inc/dbos-email-ses"; 33 | 34 | const sesMailer = new DBOS_SES('marketing', {awscfgname: 'marketing_email_aws_config'}); 35 | ``` 36 | 37 | Methods can then be called on the object instance: 38 | ```typescript 39 | sesMailer.sendEmail( 40 | { 41 | to: [DBOS.getConfig('marketing_mailing_list_address', 'dbos@nowhere.dev')], 42 | from: DBOS.getConfig('marketing_from_address', 'info@dbos.dev'), 43 | subject: 'New SES Library Version Released', 44 | bodyText: 'Check mailbox to see if this library is able to send mail about itself.', 45 | // ... 46 | }, 47 | ``` 48 | -------------------------------------------------------------------------------- /docs/typescript/tutorials/instantiated-objects.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 85 3 | title: Using TypeScript Objects 4 | description: Learn how to make workflows, transactions, and steps reusable and configurable by instantiating objects 5 | --- 6 | 7 | You can add DBOS workflow, step, and transaction decorators to your TypeScript class instance methods. 8 | To add DBOS decorators to your instance methods, their class must inherit from `ConfiguredInstance`, which will take an instance name and register the instance. 9 | 10 | For example: 11 | ```typescript 12 | class MyClass extends ConfiguredInstance { 13 | cfg: MyConfig; 14 | constructor(name: string, config: MyConfig) { 15 | super(name); 16 | this.cfg = config; 17 | } 18 | 19 | override async initialize() : Promise<void> { 20 | // ... Validate this.cfg 21 | } 22 | 23 | @DBOS.transaction() 24 | async testTransaction() { 25 | // ... Operations that use this.cfg 26 | } 27 | 28 | @DBOS.step() 29 | async testStep() { 30 | // ... Operations that use this.cfg 31 | } 32 | 33 | @DBOS.workflow() 34 | async testWorkflow(p: string): Promise<void> { 35 | // ... 
Operations that use this.cfg 36 | } 37 | } 38 | 39 | const myClassInstance = new MyClass('instanceA'); 40 | ``` 41 | 42 | When you create a new instance of a DBOS-decorated class, the constructor for the base `ConfiguredInstance` must be called with a `name`. This `name` should be unique among instances of the same class. Additionally, all `ConfiguredInstance` classes must be instantiated before DBOS.launch() is called. 43 | 44 | The reason for these requirements is to enable workflow recovery. When you create a new instance, DBOS stores it in a global registry indexed by `name`. When DBOS needs to recover a workflow belonging to that class, it looks up the `name` so it can run the workflow using the right class instance. While names are used by DBOS Transact internally to find the correct object instance across system restarts, they are also potentially useful for monitoring, tracing, and debugging. 45 | 46 | ## Using Configured Class Instances 47 | Configured class instances should be created and named when the application starts, before any workflows run. This ensures that they will all be initialized before any processing begins. 48 | 49 | ### Writing New Configured Classes 50 | All configured classes: 51 | * Extend from the `ConfiguredInstance` base class 52 | * Provide a constructor, which can take any arguments, but must provide a name to the base `ConfiguredInstance` constructor 53 | * May have an `initialize()` method that will be called after all objects have been created, but before request handling commences 54 | 55 | ### `initialize()` Method 56 | The `initialize()` method will be called during application initialization, after the code modules have been loaded, but before request and workflow processing commences. [`DBOS`](../reference/transactapi/dbos-class.md) is available during initialize. Any validation of connection information (complete with diagnostic logging and reporting of any problems) should be performed in `initialize()`. 
57 | 58 | ## Notes 59 | Event and handler registration decorators such as `@DBOS.scheduled`, `@KafkaConsume`, `@DBOS.getApi`, and `@DBOS.putApi` cannot be applied to instance methods. -------------------------------------------------------------------------------- /docs/typescript/tutorials/logging.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 50 3 | title: Logging & Tracing 4 | --- 5 | 6 | ### Logging 7 | 8 | For convenience, DBOS provides a pre-configured logger for you to use available at [`DBOS.logger`](../reference/transactapi/dbos-class#accessing-logging). 9 | For example: 10 | 11 | ```javascript 12 | DBOS.logger.info("Welcome to DBOS!"); 13 | ``` 14 | 15 | You can [configure](../reference/configuration.md) the log level of this built-in logger. 16 | This also configures the log level of the DBOS library: 17 | 18 | ```javascript 19 | DBOS.setConfig({ 20 | name: 'my-app', 21 | databaseUrl: process.env.DBOS_DATABASE_URL, 22 | logLevel: "info", 23 | }); 24 | await DBOS.launch(); 25 | ``` 26 | 27 | ### Tracing 28 | 29 | DBOS automatically constructs [OpenTelemetry](https://opentelemetry.io/) traces of all workflows and their steps. 30 | 31 | DBOS constructs hierarchical [spans](https://opentelemetry.io/docs/concepts/signals/traces/#spans) for workflows and each of their steps. 32 | For example, if an HTTP endpoint calls a workflow that calls a transaction, DBOS constructs a trace encompassing the entire request, with spans for the HTTP endpoint, the workflow, and the transaction. 33 | The transaction span is a child of the workflow span, which is a child of the HTTP endpoint span. 34 | You can access your current span via [`DBOS.span`](../reference/transactapi/dbos-class#accessing-the-tracing-span). 35 | 36 | ### OpenTelemetry Export 37 | 38 | You can export DBOS traces to any OpenTelemetry Protocol (OTLP)-compliant receiver. 
39 | 40 | You can [configure](../reference/configuration.md) a custom export target. 41 | For example: 42 | 43 | ```javascript 44 | DBOS.setConfig({ 45 | name: 'my-app', 46 | databaseUrl: process.env.DBOS_DATABASE_URL, 47 | otlpTracesEndpoints: ["http://localhost:4318/v1/traces"], 48 | }); 49 | await DBOS.launch(); 50 | ``` 51 | 52 | For example, try using [Jaeger](https://www.jaegertracing.io/docs/latest/getting-started/) to visualize the traces of your local application. 53 | -------------------------------------------------------------------------------- /docs/typescript/tutorials/requestsandevents/_category_.json: -------------------------------------------------------------------------------- 1 | { 2 | "label": "Handling Requests And Events", 3 | "position": 200 4 | } 5 | -------------------------------------------------------------------------------- /docs/typescript/tutorials/scheduled-workflows.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 48 3 | title: Scheduled Workflows 4 | --- 5 | 6 | You can schedule DBOS [workflows](./workflow-tutorial.md) to run exactly once per time interval. 7 | To do this, annotate the workflow with the [`@DBOS.scheduled`](../reference/transactapi/dbos-class#scheduled-workflows) decorator and specify the schedule in [crontab](https://en.wikipedia.org/wiki/Cron) syntax. For example: 8 | 9 | ```typescript 10 | import { DBOS } from '@dbos-inc/dbos-sdk'; 11 | 12 | class ScheduledExample{ 13 | @DBOS.workflow() 14 | @DBOS.scheduled({crontab: '*/30 * * * * *'}) 15 | static async scheduledFunc(schedTime: Date, startTime: Date) { 16 | DBOS.logger.info(`I am a workflow scheduled to run every 30 seconds`); 17 | } 18 | } 19 | ``` 20 | 21 | Scheduled workflows must take in exactly two arguments: the time that the run was scheduled (as a `Date`) and the time the run was actually started (as a `Date`). 
22 | 23 | To learn more about crontab syntax, see [this guide](https://docs.gitlab.com/ee/topics/cron/) or [this crontab editor](https://crontab.guru/). 24 | The specification for the DBOS variant can be found in the [DBOS API reference](../reference/transactapi/dbos-class#crontab-specification). 25 | 26 | The DBOS crontab format supports some common extensions, as seen in the following examples: 27 | - `* * * * *`: Every minute of every day 28 | - `* 5 * * 1`: Every minute during the hour of 05:00 on Mondays. 29 | - `30 * * * * *`: 30 seconds after the beginning of every minute 30 | - `0 12 * * Sun`: At noon on Sundays 31 | - `0 9-17 * * *`: At the top of the hour, from 9am to 5pm 32 | - `*/30 * * * * *`: Every 30 seconds (seconds is evenly divisible by 30) 33 | - `10-19/2 * * January,Feb *`: At 10, 12, 14, 16, and 18 minutes into each hour of every day, only in the months of January and February 34 | 35 | ### How Scheduling Works 36 | Under the hood, DBOS constructs an [idempotency key](./workflow-tutorial.md#workflow-ids-and-idempotency) for each workflow invocation. The key is a concatenation of the function name and the scheduled time, ensuring each scheduled invocation occurs exactly once while your application is active. 37 | 38 | Sometimes, you may require that a scheduled workflow run **exactly once** per interval, even if the application was offline when it should have run. 39 | For example, if your workflow is supposed to run every Friday at 9 PM UTC, but your application is offline for maintenance one Friday, you may want the workflow to launch as soon as your application is restarted. 
40 | You can configure this behavior in the `DBOS.scheduled` decorator: 41 | 42 | ```typescript 43 | @DBOS.scheduled({mode: SchedulerMode.ExactlyOncePerInterval, crontab: '...'}) 44 | ``` -------------------------------------------------------------------------------- /docs/typescript/tutorials/step-tutorial.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 20 3 | title: Steps 4 | description: Learn how to communicate with external APIs and services 5 | --- 6 | 7 | When using DBOS workflows, you should annotate any function that performs complex operations or accesses external APIs or services as a _step_. 8 | If a workflow is interrupted, upon restart it automatically resumes execution from the **last completed step**. 9 | 10 | You can turn **any** TypeScript function into a step by annotating it with the [`@DBOS.step`](../reference/transactapi/dbos-class.md#dbosstep) decorator. 11 | The only requirements are that it must be a static class member function and that its inputs and outputs should be serializable to JSON. 12 | Here's a simple example: 13 | 14 | ```javascript 15 | class Example { 16 | @DBOS.step() 17 | static async exampleStep() { 18 | return await fetch("https://example.com").then(r => r.text()); 19 | } 20 | } 21 | ``` 22 | 23 | You should make a function a step if you're using it in a DBOS workflow and it performs a [**nondeterministic**](../tutorials/workflow-tutorial.md#determinism) operation. 24 | A nondeterministic operation is one that may return different outputs given the same inputs. 25 | Common nondeterministic operations include: 26 | 27 | - Accessing an external API or service, like serving a file from [AWS S3](https://aws.amazon.com/s3/), calling an external API like [Stripe](https://stripe.com/), or accessing an external data store like [Elasticsearch](https://www.elastic.co/elasticsearch/). 28 | - Accessing files on disk. 29 | - Generating a random number. 
30 | - Getting the current time. 31 | 32 | You **cannot** call, start, or enqueue workflows from within steps. 33 | You also cannot call DBOS methods like `DBOS.send` or `DBOS.setEvent` from within steps. 34 | These operations should be performed from workflow functions. 35 | You can call one step from another step, but the called step becomes part of the calling step's execution rather than functioning as a separate step. 36 | 37 | ### Configurable Retries 38 | 39 | You can optionally configure a step to automatically retry any exception a set number of times with exponential backoff. 40 | This is useful for automatically handling transient failures, like making requests to unreliable APIs. 41 | Retries are configurable through arguments to the [step decorator](../reference/transactapi/dbos-class.md#dbosstep): 42 | 43 | ```typescript 44 | export interface StepConfig { 45 | retriesAllowed?: boolean; // Should failures be retried? (default false) 46 | intervalSeconds?: number; // Seconds to wait before the first retry attempt (default 1). 47 | maxAttempts?: number; // Maximum number of retry attempts (default 3). If errors occur more times than this, throw an exception. 48 | backoffRate?: number; // Multiplier by which the retry interval increases after a retry attempt (default 2). 49 | } 50 | ``` 51 | 52 | For example, let's configure this step to retry exceptions (such as if `example.com` is temporarily down) up to 10 times: 53 | 54 | ```javascript 55 | @DBOS.step({retriesAllowed: true, maxAttempts: 10}) 56 | static async exampleStep() { 57 | return await fetch("https://example.com").then(r => r.text()); 58 | } 59 | ``` 60 | 61 | If a step exhausts all `maxAttempts` retries, it throws an exception (`DBOSMaxStepRetriesError`) to the calling workflow. 62 | If that exception is not caught, the workflow [terminates](./workflow-tutorial.md#reliability-guarantees). 
-------------------------------------------------------------------------------- /docs/typescript/tutorials/stored-proc-tutorial.md: -------------------------------------------------------------------------------- 1 | --- 2 | sidebar_position: 80 3 | title: Stored Procedures 4 | description: Learn how to improve database interaction performance with Stored Procedures 5 | --- 6 | 7 | In this guide, you'll learn how to interact with your database using stored procedures. 8 | 9 | Stored procedures are similar to [transaction functions](./transaction-tutorial.md). 10 | They are used to perform operations on your application database. 11 | However, stored procedure functions run inside the database, whereas transaction functions run on the application server. 12 | Running your business logic inside the database improves both performance and scalability. 13 | Transaction functions require at least three network round trips to the database in addition to any queries executed by the transaction function itself. 14 | Stored procedure functions only require a single network round trip to the database, while still providing the same behavior and guarantees as transaction functions. 15 | 16 | While most database management systems provide support for stored procedures, they are often avoided because they are hard to use. 17 | They typically need to be written in a custom language such as [PL/pgSQL](https://www.postgresql.org/docs/current/plpgsql.html). 18 | Additionally, they are not usually integrated into the application development process. 19 | DBOS stored procedure functions, in contrast, are written in TypeScript like the rest of your DBOS application. 20 | The [DBOS Compiler](../reference/tools/dbos-compiler.md) deploys the stored procedure functions from your application to your application database. 21 | 22 | Here's an example of a stored procedure function. 
23 | You'll notice it is similar to the [example transaction function](./transaction-tutorial.md) from the transaction tutorial. 24 | 25 | :::warning 26 | Because stored procedures run inside the database, only raw database queries are supported. 27 | Query builders like Knex.js and ORMs like TypeORM and Prisma are not supported in stored procedure functions. 28 | ::: 29 | 30 | ```javascript 31 | export class Greetings { 32 | @DBOS.storedProcedure() 33 | static async InsertGreeting(friend: string, note: string) { 34 | await DBOS.sqlClient.query('INSERT INTO greetings (name, note) VALUES ($1, $2)', [friend, note]); 35 | } 36 | } 37 | ``` 38 | 39 | ## Deploying DBOS Stored Procedures 40 | 41 | DBOS Stored procedure functions depend on [PLV8](https://plv8.github.io/), a trusted JavaScript language extension for PostgreSQL. 42 | PLV8 is supported out of the box on DBOS Cloud and several major cloud database providers. 43 | For running locally, we recommend using the [`sibedge/postgres-plv8` Docker image](https://plv8.com) provided by [Sibedge](https://sibedge.com/). 44 | For using DBOS Stored Procedures on your own PostgreSQL server, please see the [official PLV8 documentation](https://plv8.github.io/#building) for installation instructions. 45 | 46 | :::info 47 | As of version 1.17, the [@dbos-inc/create](../reference/tools/cli#npx-dbos-inccreate) templates have been updated to use `sibedge/postgres-plv8` in the `start_postgres_docker.js` script. 48 | Older DBOS applications using Docker will need to switch their PostgreSQL image from `postgres:16.1` to `sibedge/postgres-plv8` manually to support Stored Procedures. 49 | ::: 50 | 51 | Before running your DBOS application that uses stored procedures, you need to deploy those stored procedures to the database. 52 | To deploy your stored procedure functions to the database, you need the [DBOS Compiler](../reference/tools/dbos-compiler.md). 
53 | Add the DBOS Compiler package as a devDependency of your app via NPM: 54 | 55 | ``` 56 | npm install --save-dev @dbos-inc/dbos-compiler 57 | ``` 58 | 59 | Once the DBOS Compiler is installed, you can use it to deploy the stored procedures to the database server specified 60 | in the [`dbos-config.yaml` file](../reference/configuration.md) with the following command: 61 | 62 | ``` 63 | npx dbosc deploy 64 | ``` 65 | 66 | :::info 67 | For information about all of the compiler's command line options, please see the [DBOS Compiler reference page](../reference/tools/dbos-compiler.md) 68 | ::: 69 | 70 | You can add `npx dbosc deploy` to your [database migration commands](../reference/configuration.md#database-section) to run it alongside other schema migrations. 71 | Deploying your app's stored procedures via Database Schema Management is required for DBOS Cloud deployment. 72 | 73 | ```yaml 74 | database: 75 | migrate: 76 | - npx knex migrate:latest 77 | - npx dbosc deploy 78 | ``` 79 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dbos-docs", 3 | "version": "0.0.0", 4 | "private": true, 5 | "scripts": { 6 | "docusaurus": "docusaurus", 7 | "start": "docusaurus start -p 4000", 8 | "build": "docusaurus build", 9 | "swizzle": "docusaurus swizzle", 10 | "deploy": "docusaurus deploy", 11 | "clear": "docusaurus clear", 12 | "serve": "docusaurus serve", 13 | "write-translations": "docusaurus write-translations", 14 | "write-heading-ids": "docusaurus write-heading-ids" 15 | }, 16 | "dependencies": { 17 | "@docusaurus/core": "^3.8.0", 18 | "@docusaurus/plugin-client-redirects": "^3.8.0", 19 | "@docusaurus/preset-classic": "^3.8.0", 20 | "@mdx-js/react": "^3.0.0", 21 | "@stackql/docusaurus-plugin-hubspot": "file:deps/stackql-docusaurus-plugin-hubspot-1.1.0.tgz", 22 | "clsx": "^1.2.1", 23 | "docusaurus-plugin-matomo": "^0.0.8", 
24 | "posthog-docusaurus": "^2.0.1", 25 | "prism-react-renderer": "^2.1.0", 26 | "react": "^18.2.0", 27 | "react-dom": "^18.2.0", 28 | "react-icons": "^5.3.0" 29 | }, 30 | "devDependencies": { 31 | "@docusaurus/module-type-aliases": "^3.8.0", 32 | "@docusaurus/types": "^3.8.0" 33 | }, 34 | "browserslist": { 35 | "production": [ 36 | ">0.5%", 37 | "not dead", 38 | "not op_mini all" 39 | ], 40 | "development": [ 41 | "last 1 chrome version", 42 | "last 1 firefox version", 43 | "last 1 safari version" 44 | ] 45 | }, 46 | "engines": { 47 | "node": ">=18.0" 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /sidebars.js: -------------------------------------------------------------------------------- 1 | /** 2 | * Creating a sidebar enables you to: 3 | - create an ordered group of docs 4 | - render a sidebar for each doc of that group 5 | - provide next/previous navigation 6 | 7 | The sidebars can be generated from the filesystem, or explicitly defined here. 8 | 9 | Create as many sidebars as you want. 
10 | */ 11 | 12 | // @ts-check 13 | 14 | /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */ 15 | const sidebars = { 16 | tutorialSidebar: [ 17 | { 18 | type: 'doc', 19 | id: 'index', 20 | label: 'Home', 21 | }, 22 | { 23 | type: 'doc', 24 | id: 'quickstart', 25 | label: 'Get Started', 26 | }, 27 | { 28 | type: 'doc', 29 | id: 'why-dbos', 30 | label: 'Why DBOS?', 31 | }, 32 | { 33 | type: 'category', 34 | label: 'Develop with Python', 35 | items: [ 36 | { 37 | type: 'autogenerated', 38 | dirName: 'python', 39 | } 40 | ], 41 | }, 42 | { 43 | type: 'category', 44 | label: 'Develop with TypeScript', 45 | items: [ 46 | { 47 | type: 'autogenerated', 48 | dirName: 'typescript', 49 | } 50 | ], 51 | }, 52 | { 53 | type: 'category', 54 | label: 'Deploy To Production', 55 | items: [ 56 | { 57 | type: 'autogenerated', 58 | dirName: 'production', 59 | } 60 | ], 61 | }, 62 | { 63 | type: 'category', 64 | label: 'Integrations', 65 | items: [ 66 | { 67 | type: 'autogenerated', 68 | dirName: 'integrations', 69 | } 70 | ], 71 | }, 72 | { 73 | type: 'category', 74 | label: 'Concepts and Explanations', 75 | items: [ 76 | { 77 | type: 'autogenerated', 78 | dirName: 'explanations', 79 | } 80 | ], 81 | }, 82 | { 83 | type: 'doc', 84 | id: 'faq', 85 | label: 'Troubleshooting & FAQ', 86 | }, 87 | ], 88 | 89 | examplesSidebar: [ 90 | { 91 | type: 'doc', 92 | id: 'examples/index', 93 | label: 'Overview', 94 | }, 95 | { 96 | type: 'category', 97 | label: 'Python Examples', 98 | items: [ 99 | { 100 | type: 'autogenerated', 101 | dirName: 'python/examples', 102 | } 103 | ], 104 | collapsed: false, 105 | }, 106 | { 107 | type: 'category', 108 | label: 'TypeScript Examples', 109 | items: [ 110 | { 111 | type: 'autogenerated', 112 | dirName: 'typescript/examples', 113 | } 114 | ], 115 | collapsed: false, 116 | } 117 | ], 118 | }; 119 | 120 | module.exports = sidebars; 121 | -------------------------------------------------------------------------------- 
/src/components/BrowserWindow/index.tsx: -------------------------------------------------------------------------------- 1 | /** 2 | * Copyright (c) Facebook, Inc. and its affiliates. 3 | * 4 | * This source code is licensed under the MIT license found in the 5 | * LICENSE file in the root directory of this source tree. 6 | */ 7 | 8 | import React, {type CSSProperties, type ReactNode} from 'react'; 9 | import clsx from 'clsx'; 10 | 11 | import styles from './styles.module.css'; 12 | 13 | interface Props { 14 | children: ReactNode; 15 | minHeight?: number; 16 | url: string; 17 | style?: CSSProperties; 18 | bodyStyle?: CSSProperties; 19 | } 20 | 21 | export function BrowserWindow({ 22 | children, 23 | minHeight, 24 | url = 'http://localhost:3000', 25 | style, 26 | bodyStyle, 27 | }: Props): JSX.Element { 28 | return ( 29 |
30 |
31 |
32 | 33 | 34 | 35 |
36 |
37 | {url} 38 |
39 |
40 |
41 | 42 | 43 | 44 |
45 |
46 |
47 | 48 |
49 | {children} 50 |
51 |
52 | ); 53 | } 54 | 55 | // Quick and dirty component, to improve later if needed 56 | export function IframeWindow({url}: {url: string}): JSX.Element { 57 | return ( 58 |
59 | 68 |