├── .envrc
├── .github
├── actions
│ └── gateway-tests
│ │ └── action.yml
├── pull_request_template.md
├── scripts
│ └── free-disk-space.sh
└── workflows
│ ├── ci.yml
│ ├── containers.yml
│ ├── deploy.yml
│ ├── docker.yml
│ ├── releaser.yml
│ ├── tools-manifest.yml
│ └── tools.yml
├── .gitignore
├── Dockerfile
├── LICENSE.md
├── LabBio_whitetext_transparent.png
├── LabDAO_Logo_Teal.png
├── README.md
├── checkpoint_0_event.csv
├── cmd
├── root.go
├── upgrade.go
└── web.go
├── contracts
├── README.md
├── artifacts
│ ├── @openzeppelin
│ │ └── contracts
│ │ │ ├── token
│ │ │ │ └── ERC1155
│ │ │ │ │ ├── ERC1155.sol
│ │ │ │ │ │ ├── ERC1155.dbg.json
│ │ │ │ │ │ └── ERC1155.json
│ │ │ │ │ ├── IERC1155.sol
│ │ │ │ │ │ ├── IERC1155.dbg.json
│ │ │ │ │ │ └── IERC1155.json
│ │ │ │ │ ├── IERC1155Receiver.sol
│ │ │ │ │ │ ├── IERC1155Receiver.dbg.json
│ │ │ │ │ │ └── IERC1155Receiver.json
│ │ │ │ │ └── extensions
│ │ │ │ │ │ └── IERC1155MetadataURI.sol
│ │ │ │ │ │ │ ├── IERC1155MetadataURI.dbg.json
│ │ │ │ │ │ │ └── IERC1155MetadataURI.json
│ │ │ └── utils
│ │ │ │ ├── Context.sol
│ │ │ │ │ ├── Context.dbg.json
│ │ │ │ │ └── Context.json
│ │ │ │ └── introspection
│ │ │ │ │ ├── ERC165.sol
│ │ │ │ │ │ ├── ERC165.dbg.json
│ │ │ │ │ │ └── ERC165.json
│ │ │ │ │ └── IERC165.sol
│ │ │ │ │ │ ├── IERC165.dbg.json
│ │ │ │ │ │ └── IERC165.json
│ └── contracts
│ │ └── ProofOfScience.sol
│ │ │ ├── ProofOfScience.dbg.json
│ │ │ └── ProofOfScience.json
├── contracts
│ └── ProofOfScience.sol
├── hardhat.config.js
├── ignition
│ └── modules
│ │ └── ProofOfScience.js
├── package-lock.json
├── package.json
└── test
│ └── ProofOfScience.js
├── docker-compose-gpu.yml
├── docker-compose.private.yml
├── docker-compose.tunnel.yml
├── docker-compose.yml
├── docker
├── images
│ ├── bacalhau
│ │ └── Dockerfile
│ └── ipfs
│ │ ├── Dockerfile
│ │ ├── README.md
│ │ └── container-init.d
│ │ │ ├── 001-custom-config.sh
│ │ │ ├── 002-s3-keyTransformation.sh
│ │ │ ├── 003-bootstrap-rm.sh
│ │ │ ├── 004-swarm-base64-key.sh
│ │ │ ├── 005-gateway-port.sh
│ │ │ ├── 006-datastore.sh
│ │ │ ├── 007-remote-pinning-service.sh
│ │ │ └── 999-debug-config.sh
└── ipfs_data
│ ├── config
│ ├── datastore_spec
│ └── swarm.key
├── docs
├── README.md
├── babel.config.js
├── docs
│ ├── reference
│ │ ├── api.md
│ │ └── python.md
│ └── tutorials
│ │ └── tutorials.md
├── docusaurus.config.js
├── package-lock.json
├── package.json
├── scripts
│ ├── ascicast-commands.txt
│ ├── create_ascicast.sh
│ └── find_unused_images.py
├── sidebars.js
├── src
│ ├── components
│ │ ├── AsciinemaPlayer.js
│ │ └── OpenInColab.js
│ └── css
│ │ └── custom.css
├── static
│ ├── .nojekyll
│ └── img
│ │ ├── LabDAO_Favicon_Teal.png
│ │ ├── favicon.ico
│ │ ├── labdao-icon.png
│ │ ├── labdaologo_brandmark_Teal.png
│ │ ├── logo.svg
│ │ ├── protein-folding-graphic.png
│ │ └── small-molecule-binding-graphic.png
├── tsconfig.json
└── yarn.lock
├── frontend
├── .dockerignore
├── .eslintrc.json
├── .gitignore
├── Dockerfile
├── README.md
├── app
│ ├── (auth)
│ │ └── login
│ │ │ └── page.tsx
│ ├── (resources)
│ │ ├── infrastructure
│ │ │ └── page.tsx
│ │ └── philosophy
│ │ │ └── page.tsx
│ ├── api
│ │ ├── identity-count
│ │ │ └── route.ts
│ │ └── page.tsx
│ ├── data
│ │ ├── AddFileForm.tsx
│ │ └── page.tsx
│ ├── experiments
│ │ ├── (experiment)
│ │ │ ├── (forms)
│ │ │ │ ├── ContinuousSwitch.tsx
│ │ │ │ ├── DynamicArrayField.tsx
│ │ │ │ ├── ExperimentRenameForm.tsx
│ │ │ │ ├── NewExperimentForm.tsx
│ │ │ │ ├── RerunExperimentForm.tsx
│ │ │ │ ├── formGenerator.ts
│ │ │ │ └── formUtils.ts
│ │ │ ├── (results)
│ │ │ │ ├── ExperimentResults.tsx
│ │ │ │ ├── JobDetail.tsx
│ │ │ │ ├── JobsAccordion.tsx
│ │ │ │ ├── LogViewer.tsx
│ │ │ │ └── MetricsVisualizer.tsx
│ │ │ ├── ExperimentStatus.tsx
│ │ │ ├── ExperimentSummary.tsx
│ │ │ ├── ExperimentUIContext.tsx
│ │ │ ├── ModelPanel
│ │ │ │ ├── ModelGuide.tsx
│ │ │ │ ├── ModelInfo.tsx
│ │ │ │ └── index.tsx
│ │ │ ├── [experimentID]
│ │ │ │ ├── ExperimentDetail.tsx
│ │ │ │ ├── ExperimentShare.tsx
│ │ │ │ └── page.tsx
│ │ │ ├── layout.tsx
│ │ │ └── new
│ │ │ │ └── [taskSlug]
│ │ │ │ │ └── [[...toolCID]]
│ │ │ │ │ │ └── page.tsx
│ │ └── page.tsx
│ ├── layout.tsx
│ ├── models
│ │ ├── @add
│ │ │ └── page.tsx
│ │ ├── @list
│ │ │ └── page.tsx
│ │ ├── default.tsx
│ │ └── layout.tsx
│ ├── not-found.tsx
│ ├── page.tsx
│ ├── subscribe
│ │ └── page.tsx
│ ├── subscription
│ │ └── manage
│ │ │ └── page.tsx
│ └── tasks
│ │ ├── TaskCard.tsx
│ │ ├── TasksMenu.tsx
│ │ ├── page.tsx
│ │ └── taskList.ts
├── components.json
├── components
│ ├── auth
│ │ ├── PrivyLoginButton.tsx
│ │ └── ProtectedComponent.tsx
│ ├── global
│ │ ├── Breadcrumbs.tsx
│ │ ├── Logo.tsx
│ │ ├── Nav.tsx
│ │ ├── NavItem.tsx
│ │ ├── PoweredByLogo.tsx
│ │ └── UserMenu.tsx
│ ├── payment
│ │ ├── StripeCheckoutButton.tsx
│ │ └── TransactionSummaryInfo.tsx
│ ├── shared
│ │ ├── CopyToClipboard.tsx
│ │ ├── FileSelect.tsx
│ │ ├── ModelSelect.tsx
│ │ ├── PageLoader.tsx
│ │ ├── TruncatedString.tsx
│ │ └── ViewportWarning.tsx
│ ├── ui
│ │ ├── accordion.tsx
│ │ ├── alert-dialog.tsx
│ │ ├── alert.tsx
│ │ ├── badge.tsx
│ │ ├── boolean-input.tsx
│ │ ├── button.tsx
│ │ ├── card.tsx
│ │ ├── collapsible.tsx
│ │ ├── command.tsx
│ │ ├── data-pagination.tsx
│ │ ├── data-table-column-header.tsx
│ │ ├── data-table.tsx
│ │ ├── dialog.tsx
│ │ ├── dropdown-menu.tsx
│ │ ├── form.tsx
│ │ ├── inline-edit-experiment.tsx
│ │ ├── input.tsx
│ │ ├── label.tsx
│ │ ├── popover.tsx
│ │ ├── scroll-area.tsx
│ │ ├── select.tsx
│ │ ├── separator.tsx
│ │ ├── sonner.tsx
│ │ ├── switch.tsx
│ │ ├── table.tsx
│ │ ├── tabs.tsx
│ │ ├── textarea.tsx
│ │ └── tooltip.tsx
│ └── visualization
│ │ └── Molstar
│ │ │ ├── index.tsx
│ │ │ └── skin.scss
├── fonts
│ ├── FKRasterRomanCompact-Blended.woff2
│ ├── PPFraktionMono-Bold.woff2
│ ├── PPFraktionMono-BoldItalic.woff2
│ ├── PPFraktionMono-Regular.woff2
│ ├── PPFraktionMono-RegularItalic.woff2
│ ├── PPNeueMontreal-Bold.woff2
│ ├── PPNeueMontreal-BoldItalic.woff2
│ ├── PPNeueMontreal-Italic.woff2
│ └── PPNeueMontreal-Regular.woff2
├── lib
│ ├── PrivyContext.tsx
│ ├── PrivyProviderWrapper.tsx
│ ├── backendUrl.ts
│ ├── planTemplate.ts
│ ├── providers.tsx
│ ├── redux
│ │ ├── createAppAsyncThunk.ts
│ │ ├── index.ts
│ │ ├── middleware.ts
│ │ ├── rootReducer.ts
│ │ ├── slices
│ │ │ ├── apiKeyAddSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── apiKeyListSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── experimentAddSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── experimentDetailSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── experimentListSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── experimentNamesSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── experimentUpdateSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── fileAddSlice
│ │ │ │ ├── actions.ts
│ │ │ │ ├── dataSlice.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── fileListSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── index.ts
│ │ │ ├── jobDetailSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── modelAddSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── modelDetailSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── modelListSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── stripeCheckoutSlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ ├── transactionsSummarySlice
│ │ │ │ ├── asyncActions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── slice.ts
│ │ │ │ └── thunks.ts
│ │ │ └── userSlice
│ │ │ │ ├── actions.ts
│ │ │ │ ├── index.ts
│ │ │ │ ├── selectors.ts
│ │ │ │ ├── thunks.ts
│ │ │ │ └── userSlice.ts
│ │ └── store.ts
│ └── utils.ts
├── next.config.mjs
├── package-lock.json
├── package.json
├── postcss.config.js
├── public
│ ├── browserconfig.xml
│ ├── icons
│ │ ├── android-chrome-192x192.png
│ │ ├── android-chrome-512x512.png
│ │ ├── apple-touch-icon.png
│ │ ├── favicon-16x16.png
│ │ ├── favicon-32x32.png
│ │ ├── favicon.ico
│ │ ├── mstile-144x144.png
│ │ ├── mstile-150x150.png
│ │ ├── mstile-310x150.png
│ │ ├── mstile-310x310.png
│ │ ├── mstile-70x70.png
│ │ └── safari-pinned-tab.svg
│ ├── images
│ │ ├── bg-brand-pattern.svg
│ │ ├── task-community-models.png
│ │ ├── task-protein-binder-design.png
│ │ ├── task-protein-docking.png
│ │ ├── task-protein-folding.png
│ │ └── task-small-molecule-docking.png
│ └── site.webmanifest
├── run.sh
├── styles
│ └── globals.css
├── tailwind.config.js
└── tsconfig.json
├── gateway
├── README.md
├── app.go
├── handlers
│ ├── api.go
│ ├── checkpoints.go
│ ├── experiments.go
│ ├── files.go
│ ├── healthcheck.go
│ ├── jobs.go
│ ├── models.go
│ ├── stripe.go
│ ├── tags.go
│ ├── transactions.go
│ └── users.go
├── lab-bio-api-spec.json
├── middleware
│ └── auth.go
├── migrations
│ ├── 10_update_other_to_community_models.up.sql
│ ├── 11_add_s3_info_to_jobs.down.sql
│ ├── 11_add_s3_info_to_jobs.up.sql
│ ├── 12_populate_generated_datafile_wallet_address.up.sql
│ ├── 13_add_user_admins.down.sql
│ ├── 13_add_user_admins.up.sql
│ ├── 14_add_datafile_public_attribute.down.sql
│ ├── 14_add_datafile_public_attribute.up.sql
│ ├── 15_create_user_data_file_table.up.sql
│ ├── 16_add_flow_public_attribute.down.sql
│ ├── 16_add_flow_public_attribute.up.sql
│ ├── 17_add_job_public_attribute.down.sql
│ ├── 17_add_job_public_attribute.up.sql
│ ├── 18_add_exp_uuid.down.sql
│ ├── 18_add_exp_uuid.up.sql
│ ├── 19_add_flow_record_cid_attribute.down.sql
│ ├── 19_add_flow_record_cid_attribute.up.sql
│ ├── 1_initial_schema.down.sql
│ ├── 1_initial_schema.up.sql
│ ├── 20_add_job_retry_max_runtime.down.sql
│ ├── 20_add_job_retry_max_runtime.up.sql
│ ├── 21_jobs_job_type.down.sql
│ ├── 21_jobs_job_type.up.sql
│ ├── 22_add_job_result_json_column.down.sql
│ ├── 22_add_job_result_json_column.up.sql
│ ├── 23_datafile_s3_location_columns.down.sql
│ ├── 23_datafile_s3_location_columns.up.sql
│ ├── 24_tool_ray_service_endpoint_rename.down.sql
│ ├── 24_tool_ray_service_endpoint_rename.up.sql
│ ├── 25_s3_uri_in_datafile_table.down.sql
│ ├── 25_s3_uri_in_datafile_table.up.sql
│ ├── 26_tool_s3_uri_column.down.sql
│ ├── 26_tool_s3_uri_column.up.sql
│ ├── 27_flow_drop_cid.down.sql
│ ├── 27_flow_drop_cid.up.sql
│ ├── 28_job_id_updates.up copy.sql
│ ├── 28_job_id_updates.up.sql
│ ├── 29_add_tier_and_compute_tally.down.sql
│ ├── 29_add_tier_and_compute_tally.up.sql
│ ├── 2_add_tags_schema.down.sql
│ ├── 2_add_tags_schema.up.sql
│ ├── 30_add_compute_cost.down.sql
│ ├── 30_add_compute_cost.up.sql
│ ├── 31_add_stripe_user_id.down.sql
│ ├── 31_add_stripe_user_id.up.sql
│ ├── 32_flow_to_experiment_rename.down.sql
│ ├── 32_flow_to_experiment_rename.up.sql
│ ├── 33_datafile_to_file_rename.down.sql
│ ├── 33_datafile_to_file_rename.up.sql
│ ├── 34_tool_to_model_rename.down.sql
│ ├── 34_tool_to_model_rename.up.sql
│ ├── 35_update_DB_schema.up.sql
│ ├── 36_user_subscriptions.down.sql
│ ├── 36_user_subscriptions.up.sql
│ ├── 37_filehash_nullable.down.sql
│ ├── 37_filehash_nullable.up.sql
│ ├── 38_job_type_column.down.sql
│ ├── 38_job_type_column.up.sql
│ ├── 39_inference_event_filename_column.down.sql
│ ├── 39_inference_event_filename_column.up.sql
│ ├── 3_seed_tags_table.down.sql
│ ├── 3_seed_tags_table.up.sql
│ ├── 4_add_timestamps.down.sql
│ ├── 4_add_timestamps.up.sql
│ ├── 5_change_for_queue_support.up.sql
│ ├── 6_add_tool_flags.down.sql
│ ├── 6_add_tool_flags.up.sql
│ ├── 7_add_api_keys_table.down.sql
│ ├── 7_add_api_keys_table.up.sql
│ ├── 8_add_api_keys_to_users.down.sql
│ ├── 8_add_api_keys_to_users.up.sql
│ ├── 9_add_transaction_table.down.sql
│ └── 9_add_transaction_table.up.sql
├── models
│ ├── design.go
│ ├── experiment.go
│ ├── file.go
│ ├── fileevent.go
│ ├── inferenceevent.go
│ ├── job.go
│ ├── key.go
│ ├── model.go
│ ├── organization.go
│ ├── rayjobresponse.go
│ ├── scatterplotdata.go
│ ├── tag.go
│ ├── transaction.go
│ ├── user.go
│ └── userevent.go
├── server
│ └── server.go
├── test_api.sh
└── utils
│ ├── api.go
│ ├── queue.go
│ ├── stripe.go
│ ├── utils.go
│ └── web3.go
├── go.mod
├── go.sum
├── infrastructure
└── ansible
│ ├── README.md
│ ├── ansible.cfg
│ ├── files
│ │ ├── bacalhau.service
│ │ ├── compute.service
│ │ ├── compute.yaml
│ │ ├── environment.py
│ │ ├── instance-terminator.j2
│ │ ├── ipfs.service
│ │ ├── ipfs
│ │ │ └── 0001-bucket-config.sh
│ │ ├── jupyter_notebook_config.py
│ │ ├── receptor-config
│ │ ├── receptor.service
│ │ ├── requester.service
│ │ └── requester.yaml
│ ├── install_requirements.yaml
│ ├── inventory.aws_ec2.yaml
│ ├── jupyter_deploy_plex.yaml
│ ├── jupyter_set_users.yaml
│ ├── provision_compute_instance.yaml
│ ├── provision_compute_only.yaml
│ ├── provision_ipfs.yaml
│ ├── provision_jupyter.yaml
│ ├── provision_receptor.yaml
│ ├── provision_requester.yaml
│ ├── requirements.yaml
│ ├── tasks
│ │ ├── common_tasks.yaml
│ │ ├── install_bacalhau_tasks.yaml
│ │ ├── install_docker_tasks.yaml
│ │ ├── install_gpu_tasks.yaml
│ │ ├── install_ipfs_docker_tasks.yaml
│ │ ├── install_plex_tasks.yaml
│ │ ├── jupyter_team_setup_tasks.yaml
│ │ └── pull_common_containers.yaml
│ └── vars
│ │ └── teams.yaml
├── internal
├── ipfs
│ ├── ipfs.go
│ └── ipfs_test.go
├── ipwl
│ ├── initialize_io.go
│ ├── io.go
│ ├── io_test.go
│ ├── model.go
│ ├── model_test.go
│ └── testdata
│ │ ├── binding
│ │ │ └── abl
│ │ │ │ ├── 7n9g.pdb
│ │ │ │ ├── ZINC000003986735.sdf
│ │ │ │ └── ZINC000019632618.sdf
│ │ ├── example_equibind_io.json
│ │ ├── example_initial_io.json
│ │ ├── example_io.json
│ │ └── example_tool.json
├── ray
│ └── ray.go
├── s3
│ ├── minio.go
│ └── s3.go
└── web3
│ ├── name_factory.go
│ ├── nft.go
│ └── wallet.go
├── jobs
└── .gitkeep
├── main.go
├── models
├── colabdesign
│ ├── CHANGELOG.md
│ ├── Dockerfile
│ ├── base_classes.py
│ ├── colabdesign.json
│ ├── conf
│ │ ├── config.yaml
│ │ ├── inputs
│ │ │ └── container.yaml
│ │ └── outputs
│ │ │ ├── container.yaml
│ │ │ └── local.yaml
│ ├── main.py
│ ├── requirements.txt
│ ├── test.sh
│ ├── testdata
│ │ ├── inputs
│ │ │ └── target_protein
│ │ │ │ └── pdc_upar_1_target.pdb
│ │ └── viz-input
│ │ │ └── Condition_52_Design_0.pdb
│ ├── visualisers.py
│ └── viz-requirements.txt
├── labsay
│ ├── CHANGELOG.md
│ ├── Dockerfile
│ ├── README.md
│ ├── labsay_a.json
│ ├── labsay_b.json
│ ├── main.py
│ ├── test.sh
│ ├── testdata
│ │ └── inputs
│ │ │ ├── BioCD202b18_aa_7fd4f_unrelaxed_rank_003_alphafold2_multimer_v3_model_2_seed_000.pdb
│ │ │ ├── design_1.pdb
│ │ │ ├── example.pdb
│ │ │ ├── file_example
│ │ │ │ └── message.txt
│ │ │ └── pdb_checkpoints
│ │ │ │ ├── BioCD202b18_aa_7fd4f_unrelaxed_rank_003_alphafold2_multimer_v3_model_2_seed_000.pdb
│ │ │ │ ├── design_1.pdb
│ │ │ │ └── example.pdb
│ └── user_input.json
└── ray
│ ├── colabdesign_ray_job.json
│ ├── colabfold_demo_ray_job.json
│ ├── design_with_colabdesign.json
│ ├── generate_protein_binder_backbone.json
│ ├── sample_rfdiffusion.json
│ └── simulate-protein-complex.json
├── pkg
└── README.md
├── receptor
├── Dockerfile
├── go.mod
├── go.sum
├── main.go
└── models
│ ├── job.go
│ └── setup.go
├── run_plex_multiple.sh
├── scripts
├── asciicast
│ ├── asciicast-commands.txt
│ └── create-asciicast.sh
└── build-containers.sh
├── testdata
├── binding
│ ├── abl
│ │ ├── 7n9g.pdb
│ │ ├── ZINC000003986735.sdf
│ │ └── ZINC000019632618.sdf
│ ├── pdbbind_processed_size1
│ │ └── 6d08
│ │ │ ├── 6d08_ligand.sdf
│ │ │ └── 6d08_protein_processed.pdb
│ └── pdbbind_processed_size2
│ │ ├── 6d08
│ │ │ ├── 6d08_ligand.sdf
│ │ │ └── 6d08_protein_processed.pdb
│ │ └── 6qqu
│ │ │ ├── 6qqu_ligand.mol2
│ │ │ └── 6qqu_protein_processed.pdb
├── design
│ └── insulin_target.pdb
└── ipfs_test
│ ├── haiku2.txt
│ ├── test.csv
│ └── testset_csv.csv
└── uploaded
└── 96942c2c5f73f89849f3ff183dafd864e350dbeaa899a7ba4fcce3c7fcaaf50d
│ └── a1bf7360-5f8f-46e2-8461-850d68e15d00_unrelaxed_rank_001_alphafold2_multimer_v3_model_3_seed_000 (1).pdb
/.envrc:
--------------------------------------------------------------------------------
1 | dotenv
2 |
--------------------------------------------------------------------------------
/.github/pull_request_template.md:
--------------------------------------------------------------------------------
1 | ## What type of PR is this?
2 | _Remove the categories that do not apply_
3 |
4 | - 🎮 Feature
5 | - 🐛 Bug Fix
6 | - 🍧 Refactor
7 | - 🔋 Optimization
8 | - 📓 Documentation Update
9 |
10 | ## Description
11 |
12 | _Please provide a brief description of the changes here. Include why they are needed, if applicable._
13 |
14 | _Any demonstrations of the changes (Loom, gif, etc.) may also go here._
15 |
16 | ## Related Tickets & Documents
17 |
18 | _Reference as needed._
19 |
20 | ## Steps to Test
21 |
22 | _A list of steps a reviewer can walk through to test the changes._
23 |
24 | ## Relevant GIF
25 |
26 | _Required 🐔_
27 |
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 LabDAO
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/LabBio_whitetext_transparent.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/LabBio_whitetext_transparent.png
--------------------------------------------------------------------------------
/LabDAO_Logo_Teal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/LabDAO_Logo_Teal.png
--------------------------------------------------------------------------------
/checkpoint_0_event.csv:
--------------------------------------------------------------------------------
1 | cycle,proposal,factor1,factor2,dim1,dim2,pdbFileName
2 | 0,0,54.47722375392914,14.772933065891266,7.620224475860596,-13.13,design0_n0.pdb
3 |
--------------------------------------------------------------------------------
/cmd/root.go:
--------------------------------------------------------------------------------
1 | package cmd
2 |
3 | import (
4 | "fmt"
5 | "os"
6 |
7 | "github.com/spf13/cobra"
8 | )
9 |
10 | var rootCmd = &cobra.Command{
11 | Use: "plex",
12 | Short: "Plex is a CLI application for running scientific workflows on peer-to-peer compute",
13 | Long: `Plex is a CLI application for running scientific workflows on peer-to-peer compute. Complete documentation is available at https://docs.labdao.xyz/`,
14 | Run: func(cmd *cobra.Command, args []string) {
15 | dry := true
16 | upgradePlexVersion(dry)
17 |
18 | fmt.Println("Type ./plex --help to see commands")
19 | },
20 | }
21 |
22 | func Execute() {
23 | if err := rootCmd.Execute(); err != nil {
24 | fmt.Println(err)
25 | os.Exit(1)
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/cmd/web.go:
--------------------------------------------------------------------------------
1 | package cmd
2 |
3 | import (
4 | "github.com/labdao/plex/gateway"
5 | "github.com/spf13/cobra"
6 | )
7 |
8 | var webCmd = &cobra.Command{
9 | Use: "web",
10 | Short: "Runs the Gateway web app",
11 | Long: `Runs the Gateway web app`,
12 | Run: func(cmd *cobra.Command, args []string) {
13 | dry := true
14 | upgradePlexVersion(dry)
15 | gateway.ServeWebApp()
16 | },
17 | }
18 |
19 | func init() {
20 | rootCmd.AddCommand(webCmd)
21 | }
22 |
--------------------------------------------------------------------------------
/contracts/README.md:
--------------------------------------------------------------------------------
1 | # LabDAO Contracts
2 |
3 | ProofOfScience contract on Optimism Sepolia: https://sepolia-optimism.etherscan.io/address/0x7336371ce024de5ba5fd80f53594fe518fb793ae
--------------------------------------------------------------------------------
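
As a quick, hedged illustration (not part of the repo), one way to confirm that bytecode exists at the address above is a short ethers v6 script; the public RPC endpoint and file name here are assumptions:

```ts
// check-deployment.ts (hypothetical helper, not in the repo)
// Confirms that contract bytecode is deployed at the ProofOfScience
// address on Optimism Sepolia. Assumes ethers v6 and a public RPC URL.
import { JsonRpcProvider } from "ethers";

const ADDRESS = "0x7336371ce024de5ba5fd80f53594fe518fb793ae";

async function main() {
  const provider = new JsonRpcProvider("https://sepolia.optimism.io");
  const code = await provider.getCode(ADDRESS);
  console.log(code !== "0x" ? "ProofOfScience bytecode found" : "no contract at this address");
}

main().catch(console.error);
```

--------------------------------------------------------------------------------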
/contracts/artifacts/@openzeppelin/contracts/token/ERC1155/ERC1155.sol/ERC1155.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../../../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/token/ERC1155/IERC1155.sol/IERC1155.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../../../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/token/ERC1155/IERC1155Receiver.sol/IERC1155Receiver.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../../../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/token/ERC1155/extensions/IERC1155MetadataURI.sol/IERC1155MetadataURI.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../../../../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/utils/Context.sol/Context.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/utils/Context.sol/Context.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-artifact-1",
3 | "contractName": "Context",
4 | "sourceName": "@openzeppelin/contracts/utils/Context.sol",
5 | "abi": [],
6 | "bytecode": "0x",
7 | "deployedBytecode": "0x",
8 | "linkReferences": {},
9 | "deployedLinkReferences": {}
10 | }
11 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/utils/introspection/ERC165.sol/ERC165.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../../../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/utils/introspection/ERC165.sol/ERC165.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-artifact-1",
3 | "contractName": "ERC165",
4 | "sourceName": "@openzeppelin/contracts/utils/introspection/ERC165.sol",
5 | "abi": [
6 | {
7 | "inputs": [
8 | {
9 | "internalType": "bytes4",
10 | "name": "interfaceId",
11 | "type": "bytes4"
12 | }
13 | ],
14 | "name": "supportsInterface",
15 | "outputs": [
16 | {
17 | "internalType": "bool",
18 | "name": "",
19 | "type": "bool"
20 | }
21 | ],
22 | "stateMutability": "view",
23 | "type": "function"
24 | }
25 | ],
26 | "bytecode": "0x",
27 | "deployedBytecode": "0x",
28 | "linkReferences": {},
29 | "deployedLinkReferences": {}
30 | }
31 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/utils/introspection/IERC165.sol/IERC165.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../../../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/artifacts/@openzeppelin/contracts/utils/introspection/IERC165.sol/IERC165.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-artifact-1",
3 | "contractName": "IERC165",
4 | "sourceName": "@openzeppelin/contracts/utils/introspection/IERC165.sol",
5 | "abi": [
6 | {
7 | "inputs": [
8 | {
9 | "internalType": "bytes4",
10 | "name": "interfaceId",
11 | "type": "bytes4"
12 | }
13 | ],
14 | "name": "supportsInterface",
15 | "outputs": [
16 | {
17 | "internalType": "bool",
18 | "name": "",
19 | "type": "bool"
20 | }
21 | ],
22 | "stateMutability": "view",
23 | "type": "function"
24 | }
25 | ],
26 | "bytecode": "0x",
27 | "deployedBytecode": "0x",
28 | "linkReferences": {},
29 | "deployedLinkReferences": {}
30 | }
31 |
--------------------------------------------------------------------------------
/contracts/artifacts/contracts/ProofOfScience.sol/ProofOfScience.dbg.json:
--------------------------------------------------------------------------------
1 | {
2 | "_format": "hh-sol-dbg-1",
3 | "buildInfo": "../../build-info/d810985acf1840a84b239220ccd2e3e4.json"
4 | }
5 |
--------------------------------------------------------------------------------
/contracts/hardhat.config.js:
--------------------------------------------------------------------------------
1 | require("@nomicfoundation/hardhat-toolbox");
2 | require('dotenv').config();
3 |
4 | /** @type import('hardhat/config').HardhatUserConfig */
5 |
6 | const ALCHEMY_API_KEY = process.env.ALCHEMY_API_KEY;
7 | const WALLET_PRIVATE_KEY = process.env.WALLET_PRIVATE_KEY;
8 | const OPTIMISM_BLOCK_EXPLORER_API_KEY = process.env.OPTIMISM_BLOCK_EXPLORER_API_KEY;
9 |
10 | module.exports = {
11 | solidity: "0.8.20",
12 | networks: {
13 | 'optimism-sepolia': {
14 | url: `https://opt-sepolia.g.alchemy.com/v2/${ALCHEMY_API_KEY}`,
15 | accounts: [WALLET_PRIVATE_KEY],
16 | }
17 | },
18 | etherscan: {
19 | apiKey: OPTIMISM_BLOCK_EXPLORER_API_KEY,
20 | customChains: [
21 | {
22 | network: 'optimism-sepolia',
23 | chainId: 11155420,
24 | urls: {
25 | apiURL: 'https://api-sepolia-optimistic.etherscan.io/api',
26 | }
27 | }
28 | ]
29 | }
30 | };
31 |
--------------------------------------------------------------------------------
/contracts/ignition/modules/ProofOfScience.js:
--------------------------------------------------------------------------------
1 | const { buildModule } = require("@nomicfoundation/hardhat-ignition/modules");
2 |
3 | module.exports = buildModule("ProofOfScience", (m) => {
4 | const proofOfScience = m.contract("ProofOfScience", []);
5 | return { proofOfScience };
6 | });
--------------------------------------------------------------------------------
/contracts/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "hardhat-project",
3 | "devDependencies": {
4 | "@nomicfoundation/hardhat-toolbox": "^5.0.0",
5 | "hardhat": "^2.22.2"
6 | },
7 | "dependencies": {
8 | "@a16z/contracts": "^0.0.9",
9 | "@openzeppelin/contracts": "^5.0.2",
10 | "dotenv": "^16.4.5"
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/docker-compose-gpu.yml:
--------------------------------------------------------------------------------
1 | # Minimal compose for gpu setup
2 | # To bring up network in gpu mode use:
3 | # docker compose -f docker-compose.yml -f docker-compose-gpu.yml up -d --wait --build
4 |
5 | ---
6 |
7 | services:
8 | compute:
9 | deploy:
10 | resources:
11 | reservations:
12 | devices:
13 | - driver: nvidia
14 | count: 1
15 | capabilities: [gpu]
16 |
--------------------------------------------------------------------------------
/docker-compose.private.yml:
--------------------------------------------------------------------------------
1 | # Minimal compose for private setup
2 | # To bring up network in private mode use:
3 | # docker-compose -f docker-compose.yml -f docker-compose.private.yml up -d
4 |
5 | ---
6 |
7 | services:
8 | ipfs:
9 | environment:
10 | IPFS_S3_ENABLED: "false"
11 | IPFS_SWARM_KEY_BASE64: "L2tleS9zd2FybS9wc2svMS4wLjAvCi9iYXNlMTYvCjk0Y2Y4ODFiMDZmZDI5YTgxNDVlMmY2MjNiMmRjNGMwNTU2Y2QxNTIwNWM4YjhjMzg0YWEzOThkY2U4YWFhMzYK"
12 | PRIVATE_IPFS: "true"
13 | LIBP2P_FORCE_PNET: "1"
14 | IPFS_DEBUG: "true"
15 | IPFS_LOGGING: "debug"
16 | IPFS_GATEWAY_PORT: "8888"
17 | volumes:
18 | - private-ipfs:/data/ipfs
19 |
20 | volumes:
21 | private-ipfs:
22 |
--------------------------------------------------------------------------------
/docker/images/bacalhau/Dockerfile:
--------------------------------------------------------------------------------
1 | # syntax=docker/dockerfile:1.4
2 | # Pinned to 1.20
3 | FROM golang:1.20 as build
4 |
5 | # Release tag. Used to build the binary and tag the version.
6 | ARG BACALHAU_VERSION=v1.2.0
7 | ARG TAG
8 |
9 | RUN apt-get update && apt-get -y --no-install-recommends install nodejs npm
10 |
11 | RUN git clone https://github.com/bacalhau-project/bacalhau.git /work
12 |
13 | WORKDIR /work
14 |
15 | RUN git checkout ${BACALHAU_VERSION}
16 | RUN make build-bacalhau
17 | RUN find ./bin -name 'bacalhau' -exec mv -t ./bin {} +
18 |
19 | FROM cgr.dev/chainguard/nvidia-device-plugin
20 | COPY --from=build /work/bin/bacalhau /usr/local/bin/bacalhau
21 | ENV PATH="/usr/local/bin:/usr/bin"
22 | ENTRYPOINT ["bacalhau"]
23 | LABEL org.opencontainers.image.source https://github.com/bacalhau-project/bacalhau
24 | LABEL org.opencontainers.image.title "Bacalhau"
25 | LABEL org.opencontainers.image.description "The Bacalhau network provides decentralised compute over data. See https://bacalhau.org for more info."
26 | LABEL org.opencontainers.image.licenses Apache-2.0
27 | LABEL org.opencontainers.image.url https://bacalhau.org
28 |
--------------------------------------------------------------------------------
/docker/images/ipfs/README.md:
--------------------------------------------------------------------------------
1 | Copied mostly from https://github.com/ipfs/go-ds-s3/tree/master/docker
2 |
--------------------------------------------------------------------------------
/docker/images/ipfs/container-init.d/001-custom-config.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | if [ "${IPFS_DEBUG}" == "true" ]; then
5 | set -x
6 | fi
7 |
8 | ipfs config --json API.HTTPHeaders.Access-Control-Allow-Methods '["PUT", "POST", "GET"]'
9 | ipfs config Pinning.Recursive true
10 | ipfs config --json Swarm.RelayClient.Enabled false
11 |
--------------------------------------------------------------------------------
/docker/images/ipfs/container-init.d/003-bootstrap-rm.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | if [ "${IPFS_DEBUG}" == "true" ]; then
5 | set -x
6 | fi
7 |
8 | if [ "${PRIVATE_IPFS}" == "true" ]; then
9 | echo "Running in private mode, removing bootstrap"
10 | ipfs bootstrap rm --all
11 | fi
12 |
--------------------------------------------------------------------------------
/docker/images/ipfs/container-init.d/004-swarm-base64-key.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 | user=ipfs
5 | repo="$IPFS_PATH"
6 |
7 | if [ "${IPFS_DEBUG}" == "true" ]; then
8 | set -x
9 | fi
10 |
11 |
12 | # Set up the swarm key, if provided
13 | SWARM_KEY_FILE="$repo/swarm.key"
14 | SWARM_KEY_PERM=0600
15 |
16 | # Create a swarm key from a given environment variable
17 | ls -ltra "$SWARM_KEY_FILE" || true
18 |
19 | if [ -n "$IPFS_SWARM_KEY_BASE64" ] && [ "${PRIVATE_IPFS}" == "true" ]; then
20 | echo "Copying swarm key from variable IPFS_SWARM_KEY_BASE64..."
21 | echo "$IPFS_SWARM_KEY_BASE64" | base64 -d >"$SWARM_KEY_FILE" || exit 1
22 | chmod $SWARM_KEY_PERM "$SWARM_KEY_FILE"
23 | fi
24 |
--------------------------------------------------------------------------------
/docker/images/ipfs/container-init.d/005-gateway-port.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | if [ "${IPFS_DEBUG}" == "true" ]; then
5 | set -x
6 | fi
7 |
8 | if [ -n "${IPFS_GATEWAY_PORT}" ]; then
9 | ipfs config Addresses.Gateway /ip4/0.0.0.0/tcp/"${IPFS_GATEWAY_PORT}"
10 | fi
11 |
--------------------------------------------------------------------------------
/docker/images/ipfs/container-init.d/006-datastore.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | if [ "${IPFS_DEBUG}" == "true" ]; then
5 | set -x
6 | fi
7 |
8 | if [ -n "${IPFS_DATASTORE_STORAGEMAX}" ]; then
9 | ipfs config Datastore.StorageMax "${IPFS_DATASTORE_STORAGEMAX}"
10 | fi
11 |
12 | if [ -n "${IPFS_DATASTORE_STORAGEGCWATERMARK}" ]; then
13 | ipfs config Datastore.StorageGCWatermark "${IPFS_DATASTORE_STORAGEGCWATERMARK}"
14 | fi
15 |
16 | if [ -n "${IPFS_DATASTORE_GCPERIOD}" ]; then
17 | ipfs config Datastore.GCPeriod "${IPFS_DATASTORE_GCPERIOD}"
18 | fi
19 |
--------------------------------------------------------------------------------
/docker/images/ipfs/container-init.d/007-remote-pinning-service.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Script to add remote pinning service
3 | # required args:
4 | # IPFS_ADD_REMOTE_PINNING_SERVICE=true/false
5 | # IPFS_REMOTE_PINNING_SERVICE_NAME=userfriendlyname
6 | # IPFS_REMOTE_PINNING_SERVICE_ENDPOINT=endpointurl
7 | # IPFS_REMOTE_PINNING_SERVICE_ACCESS_TOKEN=accesstoken
8 |
9 | set -e
10 |
11 | if [ "${IPFS_DEBUG}" == "true" ]; then
12 | set -x
13 | fi
14 |
15 | if [ "${IPFS_ADD_REMOTE_PINNING_SERVICE}" == "true" ]; then
16 | ipfs pin remote service add "${IPFS_REMOTE_PINNING_SERVICE_NAME}" "${IPFS_REMOTE_PINNING_SERVICE_ENDPOINT}" "${IPFS_REMOTE_PINNING_SERVICE_ACCESS_TOKEN}"
17 | fi
18 |
--------------------------------------------------------------------------------
/docker/images/ipfs/container-init.d/999-debug-config.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | set -e
3 |
4 | if [ "${IPFS_DEBUG}" == "true" ]; then
5 | set -x
6 | echo "Dumping env"
7 | env
8 | cat /data/ipfs/config
9 | ls -ltra /data/ipfs/repo.lock || true
10 | fi
11 |
--------------------------------------------------------------------------------
/docker/ipfs_data/datastore_spec:
--------------------------------------------------------------------------------
1 | {"mounts":[{"mountpoint":"/blocks","path":"blocks","shardFunc":"/repo/flatfs/shard/v1/next-to-last/2","type":"flatfs"},{"mountpoint":"/","path":"datastore","type":"levelds"}],"type":"mount"}
--------------------------------------------------------------------------------
/docker/ipfs_data/swarm.key:
--------------------------------------------------------------------------------
1 | /key/swarm/psk/1.0.0/
2 | /base16/
3 | 94cf881b06fd29a8145e2f623b2dc4c0556cd15205c8b8c384aa398dce8aaa36
4 |
--------------------------------------------------------------------------------
/docs/babel.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | presets: [require.resolve('@docusaurus/core/lib/babel/preset')],
3 | };
4 |
--------------------------------------------------------------------------------
/docs/docs/reference/api.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: API Reference
3 | description: API Reference
4 | sidebar_label: API
5 | sidebar_position: 1
6 | slug: /reference/api
7 | ---
8 |
9 | OpenAPI 3.0 documentation coming soon!
--------------------------------------------------------------------------------
/docs/docs/reference/python.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Python Reference
3 | description: Python Reference
4 | sidebar_label: Python
5 | sidebar_position: 2
6 | slug: /reference/python
7 | ---
8 |
9 | Python library coming soon!
--------------------------------------------------------------------------------
/docs/docs/tutorials/tutorials.md:
--------------------------------------------------------------------------------
1 | ---
2 | title: Drug Discovery Tutorials
3 | description: Lab.Bio Tutorials
4 | sidebar_label: Tutorials
5 | slug: /
6 | ---
7 |
8 | [Lab.Bio](https://lab.bio/) tutorials coming soon!
9 |
10 | For tutorial requests, please [open an issue](https://github.com/labdao/plex/issues/new?labels=tools&title=Tutorial+Request&body=%23%23+Tutorial+Request%0A%0A---%0A%0A%23%23%23+Describe+the+tutorial+you%27d+like%3A%0A%28Your+description+here%29%0A%0A---%0A%0A%23%23%23+Computational+biology+tools+involved%3A%0A%28List+any+specific+computational+biology+tools+that+are+involved+in+this+tutorial%29%0A%0A---%0A%0A%23%23%23+Additional+context+or+information%3A%0A%28Any+extra+information+here%29%0A) on GitHub.
--------------------------------------------------------------------------------
/docs/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "labdao-docs",
3 | "version": "0.0.0",
4 | "private": true,
5 | "scripts": {
6 | "docusaurus": "docusaurus",
7 | "start": "docusaurus start",
8 | "build": "docusaurus build",
9 | "swizzle": "docusaurus swizzle",
10 | "deploy": "docusaurus deploy",
11 | "clear": "docusaurus clear",
12 | "serve": "docusaurus serve",
13 | "write-translations": "docusaurus write-translations",
14 | "write-heading-ids": "docusaurus write-heading-ids",
15 | "typecheck": "tsc"
16 | },
17 | "dependencies": {
18 | "@docsearch/js": "3",
19 | "@docusaurus/core": "2.0.0-beta.18",
20 | "@docusaurus/preset-classic": "2.0.0-beta.18",
21 | "@mdx-js/react": "^1.6.22",
22 | "clsx": "^1.1.1",
23 | "lodash": "^4.17.21",
24 | "mdx-mermaid": "^v1.3.0",
25 | "mermaid": "^9.1.7",
26 | "prism-react-renderer": "^1.3.1",
27 | "react": "^17.0.2",
28 | "react-dom": "^17.0.2"
29 | },
30 | "devDependencies": {
31 | "@docusaurus/module-type-aliases": "2.0.0-beta.18",
32 | "@tsconfig/docusaurus": "^1.0.5",
33 | "asciinema-player": "^3.0.0-rc.1",
34 | "typescript": "^4.6.3"
35 | },
36 | "browserslist": {
37 | "production": [
38 | ">0.5%",
39 | "not dead",
40 | "not op_mini all"
41 | ],
42 | "development": [
43 | "last 1 chrome version",
44 | "last 1 firefox version",
45 | "last 1 safari version"
46 | ]
47 | }
48 | }
49 |
--------------------------------------------------------------------------------
/docs/scripts/ascicast-commands.txt:
--------------------------------------------------------------------------------
1 | ls
2 | ./plex create -t tools/equibind.json -i testdata/binding/abl/ --autoRun=true
3 | ./plex -tool equibind -input-dir testdata/binding/pdbbind_processed_size1
4 | ./plex -tool equibind -input-dir testdata/binding/pdbbind_processed_size2
5 | ./plex -tool colabfold-mini -input-dir testdata/folding -gpu=true -network=true
6 | ./plex -tool oddt -input-dir testdata/scoring/abl
--------------------------------------------------------------------------------
/docs/scripts/create_ascicast.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # run inside container image with asciinema installed
3 |
4 | # File containing the list of CLI commands to record
5 | commands_file="scripts/ascicast-commands.txt"
6 |
7 | # Display the commands in the file
8 | while IFS= read -r cmd; do
9 | echo "Command: $cmd"
10 | done < "$commands_file"
11 |
12 | # Read commands from the file into an array
13 | commands=()
14 | while IFS= read -r cmd; do
15 | commands+=("$cmd")
16 | done < "$commands_file"
17 |
18 | # Loop through the commands array and create a recording for each one
19 | for cmd in "${commands[@]}"; do
20 | # Create a unique filename for each recording
21 | filename="$(echo "$cmd" | tr -d '[:space:]/' | tr -cd '[:alnum:]._-').cast"
22 |
23 | # Create a script to simulate typing the command character by character
24 | typed_cmd_script="tmp.sh"
25 | echo "#!/bin/bash" > "$typed_cmd_script"
26 | for ((i=0; i<${#cmd}; i++)); do
27 | echo "printf '%s' '${cmd:$i:1}'" >> "$typed_cmd_script"
28 | echo "sleep 0.1" >> "$typed_cmd_script"
29 | done
30 | echo "printf '\n'" >> "$typed_cmd_script"
31 | echo "$cmd" >> "$typed_cmd_script"
32 | echo "exit" >> "$typed_cmd_script"
33 | chmod +x "$typed_cmd_script"
34 |
35 | # Start the recording, execute the command, and then exit the shell
36 | asciinema rec -c "bash $typed_cmd_script" $filename -y -i 2 --overwrite
37 |
38 | # Cleanup the temporary script
39 | rm -f "$typed_cmd_script"
40 | done
--------------------------------------------------------------------------------
/docs/scripts/find_unused_images.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | 
4 | def main():
5 |     content_dirs = ['docs']  # Add other content directories if needed
6 | 
7 |     # First pass: collect every image referenced from markdown content.
8 |     used_images = set()
9 |     for content_dir in content_dirs:
10 |         for root, _, files in os.walk(content_dir):
11 |             for md_file in (f for f in files if f.endswith(('.md', '.mdx'))):
12 |                 with open(os.path.join(root, md_file), 'r') as content_file:
13 |                     content = content_file.read()
14 |                 # Store basenames so relative and absolute references both match.
15 |                 used_images.update(
16 |                     os.path.basename(path)
17 |                     for path in re.findall(r'!\[.*?\]\((.*?)\)', content)
18 |                 )
19 | 
20 |     # Second pass: report any .png that was never referenced anywhere.
21 |     unused_found = False
22 |     for content_dir in content_dirs:
23 |         for root, _, files in os.walk(content_dir):
24 |             unused_images = [
25 |                 os.path.join(root, f)
26 |                 for f in files
27 |                 if f.endswith('.png') and f not in used_images
28 |             ]
29 |             if unused_images:
30 |                 unused_found = True
31 |                 print(f'Unused images in {root}:')
32 |                 for img in unused_images:
33 |                     print(img)
34 |                 print()
35 | 
36 |     if not unused_found:
37 |         print('No unused images found.')
38 | 
39 | if __name__ == '__main__':
40 |     main()
--------------------------------------------------------------------------------
/docs/sidebars.js:
--------------------------------------------------------------------------------
1 | // @ts-check
2 |
3 | const { readdirSync } = require('fs');
4 | const capitalize = require('lodash/capitalize');
5 |
6 | /** @type {import('@docusaurus/plugin-content-docs').SidebarsConfig} */
7 | const sidebars = {
8 | tutorialSidebar: [
9 | {
10 | type: 'doc',
11 | id: 'tutorials/tutorials',
12 | label: 'Tutorials',
13 | },
14 | {
15 | type: 'category',
16 | label: 'Reference',
17 | collapsed: false,
18 | items: [
19 | {
20 | type: 'autogenerated',
21 | dirName: 'reference',
22 | },
23 | ],
24 | },
25 | ],
26 | };
27 |
28 | module.exports = sidebars;
--------------------------------------------------------------------------------
/docs/src/components/AsciinemaPlayer.js:
--------------------------------------------------------------------------------
1 | import BrowserOnly from '@docusaurus/BrowserOnly';
2 | import React, { useEffect, useRef } from 'react';
3 | import 'asciinema-player/dist/bundle/asciinema-player.css';
4 |
5 | const AsciinemaWidget = ({ src, ...asciinemaOptions }) => {
6 |   return (
7 |     <BrowserOnly fallback={<div>Loading asciinema cast...</div>}>
8 |       {() => {
9 |         const AsciinemaPlayer = require('asciinema-player');
10 |         const ref = useRef(null);
11 | 
12 |         useEffect(() => {
13 |           AsciinemaPlayer.create(src, ref.current, asciinemaOptions);
14 |         }, [src]);
15 | 
16 |         return <div ref={ref} />;
17 |       }}
18 |     </BrowserOnly>
19 |   );
20 | };
21 | 
22 | export default AsciinemaWidget;
--------------------------------------------------------------------------------
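
For context, a hypothetical usage sketch of the widget above from an MDX page; the cast path is a placeholder, and the extra props are passed straight through as asciinema-player create() options:

```tsx
import AsciinemaPlayer from '@site/src/components/AsciinemaPlayer';

// "/casts/plex-demo.cast" is a placeholder path under static/.
<AsciinemaPlayer src="/casts/plex-demo.cast" rows={24} idleTimeLimit={2} />
```

--------------------------------------------------------------------------------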
/docs/src/components/OpenInColab.js:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 |
3 | const OpenInColab = ({ link }) => (
4 |   <a href={link} target="_blank" rel="noopener noreferrer">
5 |     <img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab" />
6 |   </a>
7 | );
8 |
9 | export default OpenInColab;
--------------------------------------------------------------------------------
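
A hypothetical usage sketch for the badge component above; the notebook URL is a placeholder:

```tsx
import OpenInColab from '@site/src/components/OpenInColab';

<OpenInColab link="https://colab.research.google.com/drive/your-notebook-id" />
```

--------------------------------------------------------------------------------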
/docs/static/.nojekyll:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/docs/static/.nojekyll
--------------------------------------------------------------------------------
/docs/static/img/LabDAO_Favicon_Teal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/docs/static/img/LabDAO_Favicon_Teal.png
--------------------------------------------------------------------------------
/docs/static/img/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/docs/static/img/favicon.ico
--------------------------------------------------------------------------------
/docs/static/img/labdao-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/docs/static/img/labdao-icon.png
--------------------------------------------------------------------------------
/docs/static/img/labdaologo_brandmark_Teal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/docs/static/img/labdaologo_brandmark_Teal.png
--------------------------------------------------------------------------------
/docs/static/img/protein-folding-graphic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/docs/static/img/protein-folding-graphic.png
--------------------------------------------------------------------------------
/docs/static/img/small-molecule-binding-graphic.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/docs/static/img/small-molecule-binding-graphic.png
--------------------------------------------------------------------------------
/docs/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | // This file is not used in compilation. It is here just for a nice editor experience.
3 | "extends": "@tsconfig/docusaurus/tsconfig.json",
4 | "compilerOptions": {
5 | "baseUrl": "."
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/frontend/.dockerignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 |
22 | # debug
23 | npm-debug.log*
24 | yarn-debug.log*
25 | yarn-error.log*
26 |
27 | # local env files
28 | .env*.local
29 |
30 | # vercel
31 | .vercel
32 |
33 | # typescript
34 | *.tsbuildinfo
35 | next-env.d.ts
36 |
--------------------------------------------------------------------------------
/frontend/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "next",
3 | "plugins": ["simple-import-sort"],
4 | "rules": {
5 | "simple-import-sort/imports": "warn",
6 | "simple-import-sort/exports": "warn",
7 | "react-hooks/exhaustive-deps": "error"
8 | }
9 | }
10 |
--------------------------------------------------------------------------------
/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 |
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 |
8 | # testing
9 | /coverage
10 |
11 | # next.js
12 | /.next/
13 | /out/
14 |
15 | # production
16 | /build
17 |
18 | # misc
19 | .DS_Store
20 | *.pem
21 | .vscode
22 |
23 | # debug
24 | npm-debug.log*
25 | yarn-debug.log*
26 | yarn-error.log*
27 |
28 | # local env files
29 | .env*.local
30 |
31 | # vercel
32 | .vercel
33 |
34 | # typescript
35 | *.tsbuildinfo
36 | next-env.d.ts
37 |
--------------------------------------------------------------------------------
/frontend/README.md:
--------------------------------------------------------------------------------
1 | # Plex Frontend
2 |
3 | ## Get Started
4 |
5 | ```bash
6 | npm install && npm run dev
7 | ```
8 |
9 | ## Add a Component
10 |
11 | Missing a component? Find it here: [shadcn/ui components](https://ui.shadcn.com/docs/components)
12 | Follow the instructions for installing via the CLI.
13 |
14 | ## Data Tables
15 |
16 | Tables use the [react-table](https://react-table.tanstack.com/) library. Good examples of integrating it with shadcn can be found here: [shadcn/ui data-table](https://ui.shadcn.com/docs/components/data-table)
17 |
18 | ## Useful Docs
19 |
20 | - [Tailwind CSS](https://tailwindcss.com/docs)
21 | - [shadcn/ui](https://ui.shadcn.com/docs)
22 | - [react-table](https://react-table.tanstack.com/)
23 | - [Lucide Icons](https://lucide.dev/icons/)
24 | - [Next.js](https://nextjs.org/docs/getting-started)
25 |
--------------------------------------------------------------------------------
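
To make the Data Tables note above concrete, a minimal sketch of a typed column definition in the TanStack react-table style that the shadcn data-table recipe builds on; the Experiment row shape and the DataTable usage are illustrative assumptions, not the repo's actual types:

```tsx
// columns.tsx (hypothetical example)
import { ColumnDef } from "@tanstack/react-table";

// Assumed row shape, for illustration only.
type Experiment = {
  id: string;
  name: string;
  status: "running" | "completed" | "failed";
};

export const columns: ColumnDef<Experiment>[] = [
  { accessorKey: "name", header: "Name" },
  { accessorKey: "status", header: "Status" },
];

// Rendered with a shadcn-style table component, e.g.:
// <DataTable columns={columns} data={experiments} />
```

--------------------------------------------------------------------------------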
/frontend/app/(auth)/login/page.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { usePrivy } from "@privy-io/react-auth";
4 | import { LockIcon } from "lucide-react";
5 | import { useRouter } from "next/navigation";
6 |
7 | import PrivyLoginButton from "@/components/auth/PrivyLoginButton";
8 | import { PageLoader } from "@/components/shared/PageLoader";
9 | import { Button } from "@/components/ui/button";
10 |
11 | export default function LoginPage() {
12 |   const { ready, authenticated } = usePrivy();
13 |   const router = useRouter();
14 | 
15 |   if (authenticated) {
16 |     router.push("/");
17 |   }
18 | 
19 |   return ready && !authenticated ? (
20 |     <div>
21 |       <LockIcon />
22 |       <h1>Log In to Your Lab.Bio Account</h1>
23 |       <PrivyLoginButton>
24 |         <Button>Log In</Button>
25 |       </PrivyLoginButton>
26 |     </div>
27 |   ) : (
28 |     <PageLoader />
29 |   );
30 | }
31 |
--------------------------------------------------------------------------------
/frontend/app/(resources)/infrastructure/page.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export default function InfrastructurePage() {
4 |   return <div>ToDo: Add Public Architecture Diagram</div>;
5 | }
6 |
--------------------------------------------------------------------------------
/frontend/app/(resources)/philosophy/page.tsx:
--------------------------------------------------------------------------------
1 | import React from "react";
2 |
3 | export default function PhilosophyPage() {
4 |   return (
5 |     <div>
6 |       <h1>Axioms</h1>
7 |       <ul>
8 |         <li>
9 |           Network innovation produces value
10 |           <p>This is why we build Internet native interfaces</p>
11 |         </li>
12 |         <li>
13 |           Permissionless Increases Creativity
14 |           <p>This is why we write open source software</p>
15 |         </li>
16 |         <li>
17 |           Reproducibility increases network growth
18 |           <p>This is why we use content identified storage</p>
19 |         </li>
20 |         <li>
21 |           Fork-ability minimizes gatekeeping
22 |           <p>This is why we support decentralized infrastructure</p>
23 |         </li>
24 |         <li>
25 |           Accessibility increases community
26 |           <p>This is why we provide educational material</p>
27 |         </li>
28 |       </ul>
29 |     </div>
30 |   );
31 | }
32 |
--------------------------------------------------------------------------------
/frontend/app/api/identity-count/route.ts:
--------------------------------------------------------------------------------
1 | /* Core */
2 | import { NextResponse } from 'next/server'
3 |
4 | export async function POST(req: Request, res: Response) {
5 | const body = await req.json()
6 | const { amount = 1 } = body
7 |
8 | // simulate IO latency
9 | await new Promise((r) => setTimeout(r, 500))
10 |
11 | return NextResponse.json({ data: amount })
12 | }
13 |
--------------------------------------------------------------------------------
/frontend/app/experiments/(experiment)/(forms)/ContinuousSwitch.tsx:
--------------------------------------------------------------------------------
1 | import { Badge } from "@/components/ui/badge";
2 | import { FormControl, FormDescription, FormLabel } from "@/components/ui/form";
3 | import { Switch } from "@/components/ui/switch";
4 |
5 | export default function ContinuousSwitch() {
6 |   return (
7 |     <div>
8 |       <FormLabel>
9 |         continuous run <Badge>Coming soon!</Badge>
10 |       </FormLabel>
11 |       <FormControl>
12 |         <Switch disabled />
13 |       </FormControl>
14 |       <FormDescription>
15 |         Continuously generate datapoints with the current sequence inputs. Turn off or edit input to cancel.
16 |       </FormDescription>
17 |     </div>
18 |   );
19 | }
20 |
--------------------------------------------------------------------------------
/frontend/app/experiments/(experiment)/ExperimentUIContext.tsx:
--------------------------------------------------------------------------------
1 | import { createContext, useState } from "react";
2 |
3 | interface ExperimentUIContextType {
4 |   activeJobUUID: string | undefined;
5 |   setActiveJobUUID: React.Dispatch<React.SetStateAction<string | undefined>>;
6 |   activeCheckpointUrl: string | undefined;
7 |   setActiveCheckpointUrl: React.Dispatch<React.SetStateAction<string | undefined>>;
8 |   modelPanelOpen: boolean;
9 |   setModelPanelOpen: React.Dispatch<React.SetStateAction<boolean>>;
10 | }
11 | export const ExperimentUIContext = createContext<ExperimentUIContextType>({} as ExperimentUIContextType);
12 | export function ExperimentUIContextProvider({ children }: { children: React.ReactNode }) {
13 |   const [activeJobUUID, setActiveJobUUID] = useState<string | undefined>(undefined);
14 |   const [activeCheckpointUrl, setActiveCheckpointUrl] = useState<string | undefined>(undefined);
15 |   const [modelPanelOpen, setModelPanelOpen] = useState(false);
16 | 
17 |   return (
18 |     <ExperimentUIContext.Provider
19 |       value={{
20 |         activeJobUUID,
21 |         setActiveJobUUID,
22 |         activeCheckpointUrl,
23 |         setActiveCheckpointUrl,
24 |         modelPanelOpen,
25 |         setModelPanelOpen,
26 |       }}
27 |     >
28 |       {children}
29 |     </ExperimentUIContext.Provider>
30 |   );
31 | }
32 |
--------------------------------------------------------------------------------
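
A hypothetical consumer sketch for the context above, showing how a child component might read and toggle the model panel state; the component name is illustrative:

```tsx
import { useContext } from "react";

import { ExperimentUIContext } from "./ExperimentUIContext";

export function ModelPanelToggle() {
  // Reads shared UI state provided by ExperimentUIContextProvider.
  const { modelPanelOpen, setModelPanelOpen } = useContext(ExperimentUIContext);
  return (
    <button onClick={() => setModelPanelOpen((open) => !open)}>
      {modelPanelOpen ? "Close" : "Open"} model panel
    </button>
  );
}
```

--------------------------------------------------------------------------------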
/frontend/app/experiments/(experiment)/[experimentID]/ExperimentShare.tsx:
--------------------------------------------------------------------------------
1 | import { Share2 } from 'lucide-react';
2 | import React, { useState } from 'react';
3 |
4 | import { Button } from "@/components/ui/button";
5 | import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
6 |
7 | const ExperimentShare = ({ experimentID }: { experimentID: string }) => {
8 | const [copied, setCopied] = useState(false);
9 | const currentPageLink = `${process.env.NEXT_PUBLIC_FRONTEND_URL}/experiments/${experimentID}`;
10 |
11 | const copyLinkToClipboard = async () => {
12 | await navigator.clipboard.writeText(currentPageLink);
13 | setCopied(true);
14 | setTimeout(() => {
15 | setCopied(false);
16 | }, 2000);
17 | };
18 |
19 |   return (
20 |     <TooltipProvider>
21 |       <Tooltip open={copied}>
22 |         <TooltipTrigger asChild>
23 |           <Button onClick={copyLinkToClipboard}>
24 |             <Share2 /> Share
25 |           </Button>
26 |         </TooltipTrigger>
27 |         <TooltipContent>Copied!</TooltipContent>
28 |       </Tooltip>
29 |     </TooltipProvider>
30 |   );
31 | };
32 |
33 | export default ExperimentShare;
--------------------------------------------------------------------------------
/frontend/app/experiments/(experiment)/layout.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import React, { ReactNode } from "react";
4 |
5 | import { ExperimentUIContextProvider } from "./ExperimentUIContext";
6 |
7 | type LayoutProps = {
8 | children: ReactNode;
9 | };
10 |
11 | export default function Layout({ children }: LayoutProps) {
12 | return (
13 |     <ExperimentUIContextProvider>
14 |       {children}
15 |     </ExperimentUIContextProvider>
16 | );
17 | }
18 |
--------------------------------------------------------------------------------
/frontend/app/models/default.tsx:
--------------------------------------------------------------------------------
1 | export default function Default() {
2 | return null;
3 | }
4 |
--------------------------------------------------------------------------------
/frontend/app/models/layout.tsx:
--------------------------------------------------------------------------------
1 | import React, { ReactNode } from "react";
2 |
3 | import { Breadcrumbs } from "@/components/global/Breadcrumbs";
4 |
5 | type LayoutProps = {
6 | children: ReactNode;
7 | list: any;
8 | add: any;
9 | };
10 |
11 | export default async function Layout({ children, list, add }: LayoutProps) {
12 | return (
13 |     <>
14 |       {add}
15 |       {/* Breadcrumbs items are assumed; the element originally passed via `actions` was lost in extraction */}
16 |       <Breadcrumbs items={[{ name: "Models", href: "/models" }]} />
17 |       {list}
18 |       {children}
19 |     </>
20 |   );
21 | }
22 |
--------------------------------------------------------------------------------
/frontend/app/not-found.tsx:
--------------------------------------------------------------------------------
1 | export default function NotFound() {
2 | return (
3 |     <div>
4 |       <h2>Not Found</h2>
5 |     </div>
6 | );
7 | }
8 |
--------------------------------------------------------------------------------
/frontend/app/page.tsx:
--------------------------------------------------------------------------------
1 | export default function IndexPage() {
2 | return null; // We redirect to the model list in next.config
3 | }
4 |
--------------------------------------------------------------------------------
/frontend/app/tasks/TaskCard.tsx:
--------------------------------------------------------------------------------
1 | import Image from "next/image";
2 | import Link from "next/link";
3 | import { ReactNode } from "react";
4 |
5 | import { Badge } from "@/components/ui/badge";
6 | import { Card, CardContent, CardTitle } from "@/components/ui/card";
7 |
8 | const CardWithHref = ({ href, children }: { href: string | undefined; children: ReactNode }) =>
9 |   href ? (
10 |     <Link href={href}>
11 |       {children}
12 |     </Link>
13 |   ) : (
14 |     <div className="relative">
15 |       <div className="absolute inset-0 z-10 flex items-center justify-center">
16 |         <Badge variant="secondary">
17 |           Coming soon
18 |         </Badge>
19 |       </div>
20 |       {children}
21 |     </div>
22 |   );
23 |
24 | export default function TaskCard({ name, slug, available }: { name: string; slug: string; available: boolean }) {
25 |   return (
26 |     <CardWithHref href={available ? `/tasks/${slug}` : undefined}>
27 |       <Card>
28 |         {/* image path follows the convention noted in taskList.ts; width/height are assumed */}
29 |         <Image src={`/images/task-${slug}.png`} alt={name} width={400} height={225} />
30 |         <CardContent>
31 |           <CardTitle>{name}</CardTitle>
32 |         </CardContent>
33 |       </Card>
34 |     </CardWithHref>
35 |   );
36 | }
37 |
--------------------------------------------------------------------------------
/frontend/app/tasks/TasksMenu.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { DropdownMenuContentProps } from "@radix-ui/react-dropdown-menu";
4 | import Link from "next/link";
5 | import React from "react";
6 |
7 | import { tasks } from "@/app/tasks/taskList";
8 | import { Badge } from "@/components/ui/badge";
9 | import { DropdownMenu, DropdownMenuContent, DropdownMenuItem, DropdownMenuTrigger } from "@/components/ui/dropdown-menu";
10 |
11 | interface TasksMenuProps {
12 | trigger: React.ReactNode;
13 | dropdownMenuContentProps?: DropdownMenuContentProps;
14 | }
15 |
16 | export default function TasksMenu({ dropdownMenuContentProps, trigger }: TasksMenuProps) {
17 | return (
18 |     <>
19 |       <DropdownMenu>
20 |         <DropdownMenuTrigger asChild>{trigger}</DropdownMenuTrigger>
21 |         <DropdownMenuContent {...dropdownMenuContentProps}>
22 |           {tasks.map((task) => (
23 |             <DropdownMenuItem key={task.slug} asChild>
24 |               <Link href={`/tasks/${task.slug}`}>
25 |                 {task.name}
26 |                 {!task.available && (
27 |                   <Badge variant="secondary">
28 |                     Coming soon
29 |                   </Badge>
30 |                 )}
31 |               </Link>
32 |             </DropdownMenuItem>
33 |           ))}
34 |         </DropdownMenuContent>
35 |       </DropdownMenu>
36 |     </>
37 | );
38 | }
39 |
--------------------------------------------------------------------------------
/frontend/app/tasks/page.tsx:
--------------------------------------------------------------------------------
1 | import { PlusIcon } from "lucide-react";
2 | import Link from "next/link";
3 |
4 | import { Breadcrumbs } from "@/components/global/Breadcrumbs";
5 | import { Card, CardContent } from "@/components/ui/card";
6 |
7 | import TaskCard from "./TaskCard";
8 | import { tasks } from "./taskList";
9 |
10 | export default function TaskList() {
11 | return (
12 | <>
13 |       <Breadcrumbs items={[{ name: "Tasks" }]} /> {/* crumb items assumed */}
14 |       <div className="p-4">
15 |         <h1 className="mb-4">Select a task to get started</h1>
16 |         <div className="grid grid-cols-2 gap-4 lg:grid-cols-3">
17 |           {tasks.map((task) => (
18 |             <TaskCard key={task.slug} {...task} />
19 |           ))}
20 |           <Card>
21 |             {/* the original link target was lost in extraction */}
22 |             <Link href="#">
23 |               <CardContent className="flex items-center gap-2">
24 |                 <PlusIcon />
25 |                 Suggest a task
26 |               </CardContent>
27 |             </Link>
28 |           </Card>
29 |         </div>
30 |       </div>
31 |     </>
32 |   );
33 | }
34 |
--------------------------------------------------------------------------------
/frontend/app/tasks/taskList.ts:
--------------------------------------------------------------------------------
1 | // These could eventually be fetched from the backend, but for now are hardcoded
2 | // Adding a task? Add its image to /public/images with filename task-<slug>.png
3 |
4 | export const tasks = [
5 | {
6 | name: "Protein Binder Design",
7 | slug: "protein-binder-design",
8 | available: true,
9 | },
10 | // {
11 | // // set to true for testing story LAB-1166
12 | // name: "Protein Folding",
13 | // slug: "protein-folding",
14 | // available: true,
15 | // },
16 | // {
17 | // name: "Protein Docking",
18 | // slug: "protein-docking",
19 | // available: false,
20 | // },
21 | // {
22 | // name: "Small Molecule Docking",
23 | // slug: "small-molecule-docking",
24 | // available: false,
25 | // },
26 | {
27 | name: "Community Models",
28 | slug: "community-models",
29 | available: true,
30 | },
31 | ];
32 |
--------------------------------------------------------------------------------
/frontend/components.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://ui.shadcn.com/schema.json",
3 | "style": "default",
4 | "rsc": true,
5 | "tsx": true,
6 | "tailwind": {
7 | "config": "tailwind.config.js",
8 | "css": "./styles/globals.css",
9 | "baseColor": "neutral",
10 | "cssVariables": true
11 | },
12 | "aliases": {
13 | "components": "@/components",
14 | "utils": "@/lib/utils"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/frontend/components/global/Breadcrumbs.tsx:
--------------------------------------------------------------------------------
1 | import Link from "next/link";
2 |
3 | interface BreadcrumbsProps {
4 | items: {
5 | name: string;
6 | href?: string;
7 | }[];
8 | actions?: React.ReactNode;
9 | }
10 |
11 | export function Breadcrumbs({ items, actions }: BreadcrumbsProps) {
12 | return (
13 |     <div className="flex items-center justify-between"> {/* layout classes assumed */}
14 |       <div className="flex items-center">
15 |         {items.map((item, idx) => {
16 |           if (idx === items.length - 1 || !item.href)
17 |             return (
18 |               <span key={idx}>
19 |                 {item.name}/
20 |               </span>
21 |             );
22 |           return (
23 |             <Link key={idx} href={item.href}>
24 |               {item.name}/
25 |             </Link>
26 |           );
27 |         })}
28 |       </div>
29 |       <div>{actions}</div>
30 |     </div>
31 | );
32 | }
33 |
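A usage sketch for the component above; the crumb names, hrefs, and the action element are illustrative, not taken from the repo. Items without an href, and the final item, render as plain text rather than links.

import { Breadcrumbs } from "@/components/global/Breadcrumbs";

// Renders "Experiments/ my-run/" with only the first crumb linked.
export function ExampleHeader() {
  return (
    <Breadcrumbs
      items={[
        { name: "Experiments", href: "/experiments" },
        { name: "my-run" }, // last item always renders unlinked
      ]}
      actions={<button>Rename</button>}
    />
  );
}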
--------------------------------------------------------------------------------
/frontend/components/global/Logo.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | const Logo = (props: React.JSX.IntrinsicAttributes & React.SVGProps<SVGSVGElement>) => (
3 |   <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" {...props}>
4 |     {/* the SVG path data was lost in extraction and is not reconstructable */}
5 |   </svg>
6 | );
7 | export default Logo;
8 |
--------------------------------------------------------------------------------
/frontend/components/global/PoweredByLogo.tsx:
--------------------------------------------------------------------------------
1 | import { cn } from "@/lib/utils";
2 | export default function PoweredByLogo({ className }: { className?: string }) {
3 | return (
4 |     <div className={cn("flex items-center gap-2 text-xs", className)}>
5 |       {/* a logo graphic (several lines in the original) was lost in extraction; wrapper and classes assumed */}
6 |       <span>Powered by Convexity Labs</span>
7 |     </div>
8 |   );
9 | }
10 |
--------------------------------------------------------------------------------
/frontend/components/shared/CopyToClipboard.tsx:
--------------------------------------------------------------------------------
1 | import { CopyIcon } from "lucide-react";
2 | import { useState } from "react";
3 |
4 | import { Button } from "@/components/ui/button";
5 | import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
6 | import { cn } from "@/lib/utils";
7 |
8 | interface CopyToClipboardProps {
9 | string: string;
10 | children?: React.ReactNode;
11 | className?: string;
12 | }
13 |
14 | export function CopyToClipboard({ string, children, className }: CopyToClipboardProps) {
15 | const [copied, setCopied] = useState(false);
16 | const copy = async () => {
17 | await navigator.clipboard.writeText(string);
18 | setCopied(true);
19 | setTimeout(() => {
20 | setCopied(false);
21 | }, 2000);
22 | };
23 |
24 | if (!string) return children;
25 |
26 |   // class names, Button variant, and the wrapping element below are assumed
27 |   return (
28 |     <span
29 |       className={cn("inline-flex items-center gap-1", className)}
30 |       onClick={() => {
31 |         copy();
32 |       }}
33 |     >
34 |       {children && <span>{children}</span>}
35 |       <TooltipProvider>
36 |         <Tooltip open={copied}>
37 |           <TooltipTrigger asChild>
38 |             <Button variant="ghost" size="icon">
39 |               <CopyIcon size={16} />
40 |             </Button>
41 |           </TooltipTrigger>
42 |           <TooltipContent>Copied!</TooltipContent>
43 |         </Tooltip>
44 |       </TooltipProvider>
45 |     </span>
46 |   );
47 | }
48 |
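A usage sketch for CopyToClipboard; the wallet-address value is illustrative. Clicking anywhere in the rendered span copies `string` and flashes the tooltip for two seconds; when `string` is empty, the children are returned untouched with no copy affordance.

import { CopyToClipboard } from "@/components/shared/CopyToClipboard";

export function WalletAddress({ address }: { address: string }) {
  return <CopyToClipboard string={address}>{address}</CopyToClipboard>;
}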
--------------------------------------------------------------------------------
/frontend/components/shared/PageLoader.tsx:
--------------------------------------------------------------------------------
1 | import { Loader2Icon } from "lucide-react";
2 |
3 | import { cn } from "@/lib/utils";
4 |
5 | import Logo from "../global/Logo";
6 |
7 | interface PageLoaderProps {
8 | variant?: "default" | "logo";
9 | className?: string;
10 | }
11 |
12 | export function PageLoader({ variant = "default", className }: PageLoaderProps) {
13 | return (
14 |     <div className={cn("flex items-center justify-center p-10", className)}> {/* classes assumed */}
15 |       {variant === "logo" ? <Logo className="w-24 animate-pulse" /> : <Loader2Icon className="animate-spin" />}
16 |     </div>
17 | );
18 | }
19 |
--------------------------------------------------------------------------------
/frontend/components/shared/TruncatedString.tsx:
--------------------------------------------------------------------------------
1 | import { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger } from "@/components/ui/tooltip";
2 |
3 | export function TruncatedString({ value, trimLength = 6 }: { value: string; trimLength?: number }) {
4 | if (!value) return null;
5 | if (value?.length) {
6 | if (value?.length < trimLength * 2) return value;
7 | return (
8 |       <TooltipProvider>
9 |         <Tooltip>
10 |           <TooltipTrigger asChild>
11 |             <span>
12 |               {`${value.substring(0, trimLength)}...${value.substring(value.length - trimLength)}`}
13 |             </span>
14 |           </TooltipTrigger>
15 |           <TooltipContent>
16 |             {value}
17 |           </TooltipContent>
18 |         </Tooltip>
19 |       </TooltipProvider>
20 | );
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/frontend/components/shared/ViewportWarning.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import { useEffect, useState } from "react";
4 |
5 | import {
6 | AlertDialog,
7 | AlertDialogAction,
8 | AlertDialogContent,
9 | AlertDialogDescription,
10 | AlertDialogFooter,
11 | AlertDialogHeader,
12 | AlertDialogTitle,
13 | } from "@/components/ui/alert-dialog";
14 |
15 | export function ViewportWarning() {
16 | const [open, setOpen] = useState(undefined as boolean | undefined);
17 |
18 |   useEffect(() => {
19 |     const handleResize = (width: number) => {
20 |       width < 900 ? setOpen(true) : setOpen(false);
21 |     };
22 |     const onResize = () => handleResize(window.innerWidth);
23 |     handleResize(window.innerWidth);
24 |     window.addEventListener("resize", onResize);
25 |     // remove the listener on unmount so it does not keep firing after the component is gone
26 |     return () => window.removeEventListener("resize", onResize);
27 |   }, []);
28 |
29 |   return (
30 |     <AlertDialog open={open}>
31 |       <AlertDialogContent>
32 |         <AlertDialogHeader>
33 |           <AlertDialogTitle>Optimized for larger screens</AlertDialogTitle>
34 |           <AlertDialogDescription>
35 |             We're still working on optimizing the UI for smaller screens. For the best experience, please use a device with a larger screen.
36 |           </AlertDialogDescription>
37 |         </AlertDialogHeader>
38 |         <AlertDialogFooter>
39 |           <AlertDialogAction onClick={() => setOpen(false)}>Continue Anyway</AlertDialogAction>
40 |         </AlertDialogFooter>
41 |       </AlertDialogContent>
42 |     </AlertDialog>
43 |   );
44 | }
45 |
--------------------------------------------------------------------------------
/frontend/components/ui/boolean-input.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 |
3 | import { cn } from "@/lib/utils";
4 |
5 | export interface CheckboxProps extends React.InputHTMLAttributes<HTMLInputElement> {}
6 |
7 | const BooleanInput = React.forwardRef<HTMLInputElement, CheckboxProps>(({ className, ...props }, ref) => {
8 |   return (
9 |     <input
10 |       type="checkbox"
11 |       ref={ref}
12 |       className={cn(
13 |         "h-4 w-4 rounded border-input disabled:cursor-not-allowed disabled:opacity-50",
14 |         className
15 |       )}
16 |       {...props}
17 |     />
18 | );
19 | });
20 | BooleanInput.displayName = "BooleanInput";
21 |
22 | export { BooleanInput };
23 |
--------------------------------------------------------------------------------
/frontend/components/ui/collapsible.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import * as CollapsiblePrimitive from "@radix-ui/react-collapsible"
4 |
5 | const Collapsible = CollapsiblePrimitive.Root
6 |
7 | const CollapsibleTrigger = CollapsiblePrimitive.CollapsibleTrigger
8 |
9 | const CollapsibleContent = CollapsiblePrimitive.CollapsibleContent
10 |
11 | export { Collapsible, CollapsibleContent, CollapsibleTrigger }
12 |
--------------------------------------------------------------------------------
/frontend/components/ui/data-table-column-header.tsx:
--------------------------------------------------------------------------------
1 | import { Column } from "@tanstack/react-table";
2 | import { ChevronDownIcon, ChevronsUpDownIcon, ChevronUpIcon, EyeOffIcon } from "lucide-react";
3 |
4 | import { cn } from "@/lib/utils";
5 |
6 | import { Button } from "./button";
7 | import { DropdownMenu, DropdownMenuContent, DropdownMenuItem, DropdownMenuSeparator, DropdownMenuTrigger } from "./dropdown-menu";
8 |
9 | interface DataTableColumnHeaderProps<TData, TValue> extends React.HTMLAttributes<HTMLDivElement> {
10 |   column: Column<TData, TValue>;
11 |   title: string;
12 | }
13 |
14 | export function DataTableColumnHeader<TData, TValue>({ column, title, className }: DataTableColumnHeaderProps<TData, TValue>) {
15 |   if (!column.getCanSort()) {
16 |     return <div className={cn(className)}>{title}</div>;
17 |   }
18 |
19 | return (
20 |     <div className={cn("flex items-center gap-1", className)}>
21 |       {title}
22 |       {column.getIsSorted() === "desc" ? (
23 |         <Button variant="ghost" size="sm" onClick={() => column.toggleSorting(false)}>
24 |           <ChevronDownIcon className="h-4 w-4" />
25 |         </Button>
26 |       ) : column.getIsSorted() === "asc" ? (
27 |         <Button variant="ghost" size="sm" onClick={() => column.toggleSorting(true)}>
28 |           <ChevronUpIcon className="h-4 w-4" />
29 |         </Button>
30 |       ) : (
31 |         <Button variant="ghost" size="sm" onClick={() => column.toggleSorting(false)}>
32 |           <ChevronsUpDownIcon className="h-4 w-4" />
33 |         </Button>
34 |       )}
35 |     </div>
36 | );
37 | }
38 |
--------------------------------------------------------------------------------
/frontend/components/ui/input.tsx:
--------------------------------------------------------------------------------
1 | import { cva, type VariantProps } from "class-variance-authority";
2 | import * as React from "react";
3 |
4 | import { cn } from "@/lib/utils";
5 |
6 | export interface InputProps extends React.InputHTMLAttributes<HTMLInputElement> {}
7 |
8 | const inputVariants = cva(
9 | "flex h-10 w-full rounded-md border bg-background p-3 ring-offset-background [&[type='file']]:p-2 file:border-0 file:bg-transparent file:text-sm file:font-bold placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50",
10 | {
11 | variants: {
12 | variant: {
13 | default: "border-input",
14 | subtle: "border-transparent hover:border-input",
15 | },
16 | },
17 | defaultVariants: {
18 | variant: "default",
19 | },
20 | }
21 | );
22 |
23 | const Input = React.forwardRef<HTMLInputElement, InputProps & VariantProps<typeof inputVariants>>(({ className, variant, type, ...props }, ref) => {
24 |   return <input type={type} className={cn(inputVariants({ variant }), className)} ref={ref} {...props} />;
25 | });
26 | Input.displayName = "Input";
27 |
28 | export { Input };
29 |
--------------------------------------------------------------------------------
/frontend/components/ui/label.tsx:
--------------------------------------------------------------------------------
1 | "use client";
2 |
3 | import * as LabelPrimitive from "@radix-ui/react-label";
4 | import { cva, type VariantProps } from "class-variance-authority";
5 | import * as React from "react";
6 |
7 | import { cn } from "@/lib/utils";
8 |
9 | const labelVariants = cva("leading-none peer-disabled:cursor-not-allowed peer-disabled:opacity-70", {
10 | variants: {
11 | variant: {
12 | default: "font-heading block text-base py-2",
13 | description: "empty:hidden first:ml-0 mx-1 inline-block text-xs text-muted-foreground font-body font-normal",
14 | },
15 | },
16 | defaultVariants: {
17 | variant: "default",
18 | },
19 | });
20 |
21 | const Label = React.forwardRef<
22 |   React.ElementRef<typeof LabelPrimitive.Root>,
23 |   React.ComponentPropsWithoutRef<typeof LabelPrimitive.Root> & VariantProps<typeof labelVariants>
24 | >(({ className, variant, ...props }, ref) => <LabelPrimitive.Root ref={ref} className={cn(labelVariants({ variant }), className)} {...props} />);
25 | Label.displayName = LabelPrimitive.Root.displayName;
26 |
27 | const LabelDescription = React.forwardRef<
28 |   React.ElementRef<typeof LabelPrimitive.Root>,
29 |   React.ComponentPropsWithoutRef<typeof LabelPrimitive.Root> & VariantProps<typeof labelVariants>
30 | >(({ className, ...props }, ref) => (
31 |   <LabelPrimitive.Root ref={ref} className={cn(labelVariants({ variant: "description" }), className)} {...props} />
32 | ));
33 | LabelDescription.displayName = LabelPrimitive.Root.displayName;
34 |
35 | export { Label, LabelDescription };
36 |
--------------------------------------------------------------------------------
/frontend/components/ui/popover.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import * as PopoverPrimitive from "@radix-ui/react-popover"
4 | import * as React from "react"
5 |
6 | import { cn } from "@/lib/utils"
7 |
8 | const Popover = PopoverPrimitive.Root
9 |
10 | const PopoverTrigger = PopoverPrimitive.Trigger
11 |
12 | const PopoverContent = React.forwardRef<
13 |   React.ElementRef<typeof PopoverPrimitive.Content>,
14 |   React.ComponentPropsWithoutRef<typeof PopoverPrimitive.Content>
15 | >(({ className, align = "center", sideOffset = 4, ...props }, ref) => (
16 |   <PopoverPrimitive.Portal>
17 |     <PopoverPrimitive.Content
18 |       ref={ref}
19 |       align={align}
20 |       sideOffset={sideOffset}
21 |       className={cn(
22 |         "z-50 w-72 rounded-md border bg-popover p-4 text-popover-foreground shadow-md outline-none data-[state=open]:animate-in data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=open]:fade-in-0 data-[state=closed]:zoom-out-95 data-[state=open]:zoom-in-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
23 |         className
24 |       )}
25 |       {...props}
26 |     />
27 |   </PopoverPrimitive.Portal>
28 | ))
29 | PopoverContent.displayName = PopoverPrimitive.Content.displayName
30 |
31 | export { Popover, PopoverContent, PopoverTrigger }
32 |
--------------------------------------------------------------------------------
/frontend/components/ui/separator.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import * as SeparatorPrimitive from "@radix-ui/react-separator"
4 | import * as React from "react"
5 |
6 | import { cn } from "@/lib/utils"
7 |
8 | const Separator = React.forwardRef<
9 |   React.ElementRef<typeof SeparatorPrimitive.Root>,
10 |   React.ComponentPropsWithoutRef<typeof SeparatorPrimitive.Root>
11 | >(
12 | (
13 | { className, orientation = "horizontal", decorative = true, ...props },
14 | ref
15 | ) => (
16 |     <SeparatorPrimitive.Root
17 |       ref={ref}
18 |       decorative={decorative}
19 |       orientation={orientation}
20 |       className={cn(
21 |         "shrink-0 bg-border",
22 |         orientation === "horizontal" ? "h-[1px] w-full" : "h-full w-[1px]",
23 |         className
24 |       )}
25 |       {...props}
26 |     />
27 | )
28 | )
29 | Separator.displayName = SeparatorPrimitive.Root.displayName
30 |
31 | export { Separator }
32 |
--------------------------------------------------------------------------------
/frontend/components/ui/sonner.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import { useTheme } from "next-themes"
4 | import { Toaster as Sonner } from "sonner"
5 |
6 | type ToasterProps = React.ComponentProps<typeof Sonner>
7 |
8 | const Toaster = ({ ...props }: ToasterProps) => {
9 | const { theme = "system" } = useTheme()
10 |
11 | return (
12 |     <Sonner
13 |       theme={theme as ToasterProps["theme"]}
14 |       className="toaster group"
15 |       toastOptions={{
16 |         classNames: {
17 |           toast:
18 |             "group toast group-[.toaster]:bg-background group-[.toaster]:text-foreground group-[.toaster]:border-border group-[.toaster]:shadow-lg",
19 |           description: "group-[.toast]:text-muted-foreground",
20 |           actionButton:
21 |             "group-[.toast]:bg-primary group-[.toast]:text-primary-foreground",
22 |           cancelButton:
23 |             "group-[.toast]:bg-muted group-[.toast]:text-muted-foreground",
24 |         },
25 |       }}
26 |       {...props}
27 |     />
28 | )
29 | }
30 |
31 | export { Toaster }
32 |
--------------------------------------------------------------------------------
/frontend/components/ui/switch.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import * as React from "react"
4 | import * as SwitchPrimitives from "@radix-ui/react-switch"
5 |
6 | import { cn } from "@/lib/utils"
7 |
8 | const Switch = React.forwardRef<
9 |   React.ElementRef<typeof SwitchPrimitives.Root>,
10 |   React.ComponentPropsWithoutRef<typeof SwitchPrimitives.Root>
11 | >(({ className, ...props }, ref) => (
12 |   <SwitchPrimitives.Root
13 |     className={cn(
14 |       "peer inline-flex h-6 w-11 shrink-0 cursor-pointer items-center rounded-full border-2 border-transparent transition-colors focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 focus-visible:ring-offset-background disabled:cursor-not-allowed disabled:opacity-50 data-[state=checked]:bg-primary data-[state=unchecked]:bg-input",
15 |       className
16 |     )}
17 |     {...props}
18 |     ref={ref}
19 |   >
20 |     <SwitchPrimitives.Thumb
21 |       className={cn(
22 |         "pointer-events-none block h-5 w-5 rounded-full bg-background shadow-lg ring-0 transition-transform data-[state=checked]:translate-x-5 data-[state=unchecked]:translate-x-0"
23 |       )}
24 |     />
25 |   </SwitchPrimitives.Root>
26 | ))
27 | Switch.displayName = SwitchPrimitives.Root.displayName
28 |
29 | export { Switch }
30 |
--------------------------------------------------------------------------------
/frontend/components/ui/textarea.tsx:
--------------------------------------------------------------------------------
1 | import * as React from "react";
2 | import TextareaAutosize, { TextareaAutosizeProps } from "react-textarea-autosize";
3 |
4 | import { cn } from "@/lib/utils";
5 | export interface TextareaProps extends React.TextareaHTMLAttributes<HTMLTextAreaElement> {}
6 |
7 | const Textarea = React.forwardRef<HTMLTextAreaElement, TextareaProps & TextareaAutosizeProps>(({ className, ...props }, ref) => {
8 | return (
9 |     <TextareaAutosize
10 |       className={cn(
11 |         "flex w-full rounded-md border border-input bg-background p-3 ring-offset-background placeholder:text-muted-foreground focus-visible:outline-none focus-visible:ring-2 focus-visible:ring-ring focus-visible:ring-offset-2 disabled:cursor-not-allowed disabled:opacity-50",
12 |         className
13 |       )}
14 |       ref={ref}
15 |       {...props}
16 |     />
17 | );
18 | });
19 | Textarea.displayName = "Textarea";
20 |
21 | export { Textarea };
22 |
--------------------------------------------------------------------------------
/frontend/components/ui/tooltip.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import * as TooltipPrimitive from "@radix-ui/react-tooltip"
4 | import * as React from "react"
5 |
6 | import { cn } from "@/lib/utils"
7 |
8 | const TooltipProvider = TooltipPrimitive.Provider
9 |
10 | const Tooltip = TooltipPrimitive.Root
11 |
12 | const TooltipTrigger = TooltipPrimitive.Trigger
13 |
14 | const TooltipContent = React.forwardRef<
15 |   React.ElementRef<typeof TooltipPrimitive.Content>,
16 |   React.ComponentPropsWithoutRef<typeof TooltipPrimitive.Content>
17 | >(({ className, sideOffset = 4, ...props }, ref) => (
18 |   <TooltipPrimitive.Content
19 |     ref={ref}
20 |     sideOffset={sideOffset}
21 |     className={cn(
22 |       "z-50 overflow-hidden rounded-md border bg-popover px-3 py-1.5 text-sm text-popover-foreground shadow-md animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2",
23 |       className
24 |     )}
25 |     {...props}
26 |   />
27 | ))
28 | TooltipContent.displayName = TooltipPrimitive.Content.displayName
29 |
30 | export { Tooltip, TooltipContent, TooltipProvider, TooltipTrigger }
31 |
--------------------------------------------------------------------------------
/frontend/components/visualization/Molstar/skin.scss:
--------------------------------------------------------------------------------
1 | $default-background: #f9fafb;
2 | $font-color: #000000;
3 | $hover-font-color: #000000;
4 | $entity-current-font-color: #0343ba;
5 | $msp-btn-remove-background: #de0a28;
6 | $msp-btn-remove-hover-font-color: #de0a28;
7 | $msp-btn-commit-on-font-color: #0343ba;
8 | $entity-badge-font-color: #000000;
9 |
10 | // used in LOG
11 | $log-message: #0cca5d;
12 | $log-info: #5e3673;
13 | $log-warning: #fcc937;
14 | $log-error: #fd354b;
15 |
16 | $logo-background: rgba(0, 0, 0, 0.75);
17 |
18 | @function color-lower-contrast($color, $amount) {
19 | @return darken($color, $amount);
20 | }
21 |
22 | @function color-increase-contrast($color, $amount) {
23 | @return lighten($color, $amount);
24 | }
25 |
26 | @import "molstar/lib/mol-plugin-ui/skin/base/base.scss";
27 |
--------------------------------------------------------------------------------
/frontend/fonts/FKRasterRomanCompact-Blended.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/FKRasterRomanCompact-Blended.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPFraktionMono-Bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPFraktionMono-Bold.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPFraktionMono-BoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPFraktionMono-BoldItalic.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPFraktionMono-Regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPFraktionMono-Regular.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPFraktionMono-RegularItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPFraktionMono-RegularItalic.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPNeueMontreal-Bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPNeueMontreal-Bold.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPNeueMontreal-BoldItalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPNeueMontreal-BoldItalic.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPNeueMontreal-Italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPNeueMontreal-Italic.woff2
--------------------------------------------------------------------------------
/frontend/fonts/PPNeueMontreal-Regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/fonts/PPNeueMontreal-Regular.woff2
--------------------------------------------------------------------------------
/frontend/lib/PrivyContext.tsx:
--------------------------------------------------------------------------------
1 | import { User } from '@privy-io/react-auth';
2 | import React from 'react';
3 |
4 | interface AuthState {
5 | user: User | null;
6 | authenticated: boolean;
7 | }
8 |
9 | export const PrivyAuthContext = React.createContext<AuthState>({ user: null, authenticated: false });
--------------------------------------------------------------------------------
/frontend/lib/backendUrl.ts:
--------------------------------------------------------------------------------
1 | export default function backendUrl() {
2 | if(!process.env.NEXT_PUBLIC_BACKEND_URL) {
3 | throw new Error("The environment variable NEXT_PUBLIC_BACKEND_URL must be defined.");
4 | }
5 | return process.env.NEXT_PUBLIC_BACKEND_URL
6 | }
7 |
--------------------------------------------------------------------------------
/frontend/lib/planTemplate.ts:
--------------------------------------------------------------------------------
1 | export interface PlanDetail {
2 | description: string;
3 | }
4 |
5 | export interface PlanTemplate {
6 | details: PlanDetail[];
7 | }
8 |
9 | const getPlanTemplate = (): PlanTemplate => {
10 | return {
11 | details: [
12 | {
13 | description: "{{includedCredits}} compute tokens per month included (about {{numMolecules}} molecules)"
14 | },
15 | {
16 | description: "Every additional token costs {{overageCharge}} USD"
17 | },
18 | {
19 | description: "Cancel subscription any time"
20 | }
21 | ]
22 | };
23 | };
24 |
25 | export default getPlanTemplate;
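The `{{placeholder}}` tokens above are meant to be substituted before display. A minimal interpolation sketch follows; the `fillTemplate` helper and the plan values are hypothetical, not part of this module.

import getPlanTemplate from "@/lib/planTemplate";

// Replaces every {{key}} token with the matching value, leaving unknown tokens intact.
const fillTemplate = (text: string, values: Record<string, string | number>) =>
  text.replace(/{{(\w+)}}/g, (match, key) => (key in values ? String(values[key]) : match));

const details = getPlanTemplate().details.map((d) =>
  fillTemplate(d.description, { includedCredits: 100, numMolecules: 20, overageCharge: 0.5 })
);
// details[0] => "100 compute tokens per month included (about 20 molecules)"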
--------------------------------------------------------------------------------
/frontend/lib/providers.tsx:
--------------------------------------------------------------------------------
1 | 'use client'
2 |
3 | import { Provider } from 'react-redux'
4 |
5 | import { reduxStore } from '@/lib/redux'
6 |
7 | import PrivyProviderWrapper from './PrivyProviderWrapper'
8 |
9 | export const Providers = (props: React.PropsWithChildren) => {
10 | return (
11 |     <Provider store={reduxStore}>
12 |       <PrivyProviderWrapper>
13 |         {props.children}
14 |       </PrivyProviderWrapper>
15 |     </Provider>
16 | )
17 | }
--------------------------------------------------------------------------------
/frontend/lib/redux/createAppAsyncThunk.ts:
--------------------------------------------------------------------------------
1 | /* Core */
2 | import { createAsyncThunk } from '@reduxjs/toolkit'
3 |
4 | /* Instruments */
5 | import type { ReduxDispatch, ReduxState } from './store'
6 |
7 | /**
8 |  * ? A utility function to create typed async thunk actions.
9 | */
10 | export const createAppAsyncThunk = createAsyncThunk.withTypes<{
11 | state: ReduxState
12 | dispatch: ReduxDispatch
13 | rejectValue: string
14 | }>()
15 |
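A sketch of how this typed factory can be used; the slice name, payload, and logic here are hypothetical, but the typing behavior is what the `withTypes` call above guarantees.

import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'

// `getState` is typed as () => ReduxState, `dispatch` as ReduxDispatch,
// and `rejectWithValue` accepts the declared string rejectValue.
export const exampleThunk = createAppAsyncThunk(
  'example/fetch',
  async (id: string, { getState, rejectWithValue }) => {
    if (!id) return rejectWithValue('missing id')
    const state = getState() // fully typed ReduxState
    return { id, user: state.user }
  }
)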
--------------------------------------------------------------------------------
/frontend/lib/redux/index.ts:
--------------------------------------------------------------------------------
1 | export * from './slices'
2 | export * from './store'
3 |
--------------------------------------------------------------------------------
/frontend/lib/redux/middleware.ts:
--------------------------------------------------------------------------------
1 | /* Core */
2 | import { createLogger } from 'redux-logger'
3 |
4 | const middleware = [
5 | createLogger({
6 | duration: true,
7 | timestamp: false,
8 | collapsed: true,
9 | colors: {
10 | title: () => '#139BFE',
11 | prevState: () => '#1C5FAF',
12 | action: () => '#149945',
13 | nextState: () => '#A47104',
14 | error: () => '#ff0005',
15 | },
16 | predicate: () => typeof window !== 'undefined',
17 | }),
18 | ]
19 |
20 | export { middleware }
21 |
--------------------------------------------------------------------------------
/frontend/lib/redux/rootReducer.ts:
--------------------------------------------------------------------------------
1 | /* Instruments */
2 | import {
3 | apiKeyAddSlice,
4 | apiKeyListSlice,
5 | fileAddSlice,
6 | fileListSlice,
7 | experimentAddSlice,
8 | experimentDetailSlice,
9 | experimentListSlice,
10 | experimentUpdateSlice,
11 | jobDetailSlice,
12 | stripeCheckoutSlice,
13 | modelAddSlice,
14 | modelDetailSlice,
15 | modelListSlice,
16 | transactionsSummarySlice,
17 | userSlice,
18 | experimentNamesSlice,
19 | } from "./slices";
20 |
21 | export const reducer = {
22 | user: userSlice.reducer,
23 | fileAdd: fileAddSlice.reducer,
24 | fileList: fileListSlice.reducer,
25 | modelAdd: modelAddSlice.reducer,
26 | modelList: modelListSlice.reducer,
27 | modelDetail: modelDetailSlice.reducer,
28 | experimentAdd: experimentAddSlice.reducer,
29 | experimentList: experimentListSlice.reducer,
30 | experimentDetail: experimentDetailSlice.reducer,
31 | experimentUpdate: experimentUpdateSlice.reducer,
32 | experimentNames: experimentNamesSlice.reducer,
33 | jobDetail: jobDetailSlice.reducer,
34 | apiKeyAdd: apiKeyAddSlice.reducer,
35 | apiKeyList: apiKeyListSlice.reducer,
36 | stripeCheckout: stripeCheckoutSlice.reducer,
37 | transactionsSummary: transactionsSummarySlice.reducer,
38 | };
39 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyAddSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth"
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export interface ApiKeyPayload {
5 | name: string;
6 | // Add any other properties that are needed for creating an API key
7 | }
8 |
9 | export const createApiKey = async (
10 | payload: ApiKeyPayload
11 | ): Promise<any> => { // the original return-type parameter was lost; `any` is a placeholder
12 | let authToken
13 | try {
14 | authToken = await getAccessToken();
15 | } catch (error) {
16 | console.log("Failed to get access token: ", error)
17 | throw new Error("Authentication failed")
18 | }
19 |
20 | const response = await fetch(`${backendUrl()}/api-keys`, {
21 | method: 'POST',
22 | headers: {
23 | 'Authorization': `Bearer ${authToken}`,
24 | 'Content-Type': 'application/json',
25 | },
26 | body: JSON.stringify(payload),
27 | })
28 |
29 | if (!response.ok) {
30 | const errorResult = await response.json();
31 | throw new Error(errorResult.message || "Failed to create API Key")
32 | }
33 |
34 | const result = await response.json()
35 | return result;
36 | }
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyAddSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyAddSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from "@/lib/redux"
2 |
3 | export const selectApiKeyAdd = (state: ReduxState) => state.apiKeyAdd.key
4 | export const selectApiKeyAddLoading = (state: ReduxState) => state.apiKeyAdd.loading
5 | export const selectApiKeyAddError = (state: ReduxState) => state.apiKeyAdd.error
6 | export const selectApiKeyAddSuccess = (state: ReduxState) => state.apiKeyAdd.success
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyAddSlice/slice.ts:
--------------------------------------------------------------------------------
1 | import { createSlice, PayloadAction } from "@reduxjs/toolkit";
2 |
3 | interface ApiKeyAddSliceState {
4 | key: string;
5 | name: string;
6 | loading: boolean;
7 | error: string | null;
8 | success: boolean;
9 | }
10 |
11 | const initialState: ApiKeyAddSliceState = {
12 | key: "",
13 | name: "",
14 | loading: false,
15 | error: null,
16 | success: false,
17 | };
18 |
19 | export const apiKeyAddSlice = createSlice({
20 | name: "apiKeyAdd",
21 | initialState,
22 | reducers: {
23 |     setApiKey: (state, action: PayloadAction<string>) => {
24 |       state.key = action.payload;
25 |     },
26 |     setApiKeyName: (state, action: PayloadAction<string>) => {
27 |       state.name = action.payload;
28 |     },
29 |     setApiKeyLoading: (state, action: PayloadAction<boolean>) => {
30 |       state.loading = action.payload;
31 |     },
32 |     setApiKeyError: (state, action: PayloadAction<string | null>) => {
33 |       state.error = action.payload;
34 |     },
35 |     setApiKeySuccess: (state, action: PayloadAction<boolean>) => {
36 |       state.success = action.payload;
37 |     },
38 | // You might need additional reducers depending on your form and API key creation logic
39 | },
40 | });
41 |
42 | export const { setApiKey, setApiKeyName, setApiKeyLoading, setApiKeyError, setApiKeySuccess } =
43 | apiKeyAddSlice.actions;
44 |
45 | export default apiKeyAddSlice.reducer;
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyAddSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'
2 |
3 | import { createApiKey } from './asyncActions'
4 | import { setApiKey, setApiKeyError, setApiKeySuccess } from './slice'
5 |
6 | interface ApiKeyPayload {
7 | name: string;
8 | // Add any other properties that are needed for creating an API key
9 | }
10 |
11 | export const addApiKeyThunk = createAppAsyncThunk(
12 | 'apiKey/addApiKey',
13 | async (payload: ApiKeyPayload, { dispatch }) => {
14 | try {
15 | const response = await createApiKey(payload)
16 | if (response && response.id) { // Assuming the response will have an 'id' field on successful creation
17 | dispatch(setApiKeySuccess(true))
18 | dispatch(setApiKey(response.key)) // Assuming you want to store the key in the state
19 | } else {
20 | console.log('Failed to add API key.', response)
21 | dispatch(setApiKeyError('Failed to add API key.'))
22 | }
23 | return response
24 | } catch (error: unknown) {
25 | console.log('Failed to add API key.', error)
26 | if (error instanceof Error) {
27 | dispatch(setApiKeyError(error.message))
28 | } else {
29 | dispatch(setApiKeyError('Failed to add API key.'))
30 | }
31 | return false
32 | }
33 | }
34 | )
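A component-side sketch of dispatching this thunk; the component and handler are illustrative. `ReduxDispatch` comes from the store module, as in createAppAsyncThunk.ts.

import { useDispatch } from 'react-redux'

import { addApiKeyThunk } from '@/lib/redux'
import type { ReduxDispatch } from '@/lib/redux/store'

export function CreateKeyButton() {
  const dispatch = useDispatch<ReduxDispatch>()
  // On success the thunk stores the new key in state.apiKeyAdd.key (see selectors.ts).
  return <button onClick={() => dispatch(addApiKeyThunk({ name: 'my key' }))}>Create API key</button>
}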
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyListSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth";
2 | import backendUrl from "lib/backendUrl";
3 |
4 | export const listApiKeys = async (): Promise<any> => { // original return-type parameter lost; `any` is a placeholder
5 | let authToken;
6 | try {
7 | authToken = await getAccessToken();
8 | console.log('authToken: ', authToken);
9 | } catch (error) {
10 | console.log('Failed to get access token: ', error);
11 | throw new Error("Authentication failed");
12 | }
13 |
14 | const requestUrl = `${backendUrl()}/api-keys`;
15 | const requestOptions = {
16 | method: 'GET',
17 | headers: {
18 | 'Authorization': `Bearer ${authToken}`,
19 | 'Content-Type': 'application/json',
20 | },
21 | };
22 | const response = await fetch(requestUrl, requestOptions);
23 |
24 | if (!response.ok) {
25 | let errorText = "Failed to list API Keys";
26 | try {
27 | const errorResult = await response.json();
28 | errorText = errorResult.message || errorText;
29 | console.log(errorText);
30 | } catch (e) {
31 | console.log('Failed to parse error response: ', e);
32 | }
33 | throw new Error(errorText);
34 | }
35 |
36 | const result = await response.json();
37 | return result;
38 | }
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyListSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyListSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectApiKeyList = (state: ReduxState) => state.apiKeyList.apiKeys
4 | export const selectApiKeyListLoading = (state: ReduxState) => state.apiKeyList.loading
5 | export const selectApiKeyListSuccess = (state: ReduxState) => state.apiKeyList.success
6 | export const selectApiKeyListError = (state: ReduxState) => state.apiKeyList.error
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyListSlice/slice.ts:
--------------------------------------------------------------------------------
1 | import { createSlice, PayloadAction } from '@reduxjs/toolkit';
2 |
3 | export interface ApiKey {
4 | key: string;
5 | scope: string;
6 | createdAt: string;
7 | expiresAt: string;
8 | }
9 |
10 | interface ApiKeyListSliceState {
11 | apiKeys: ApiKey[];
12 | loading: boolean;
13 | error: string | null;
14 | success: boolean;
15 | }
16 |
17 | const initialState: ApiKeyListSliceState = {
18 | apiKeys: [],
19 | loading: false,
20 | error: null,
21 | success: false,
22 | };
23 |
24 | export const apiKeyListSlice = createSlice({
25 | name: 'ApiKeyList',
26 | initialState,
27 | reducers: {
28 |     setApiKeyList: (state, action: PayloadAction<ApiKey[]>) => {
29 |       state.apiKeys = action.payload;
30 |     },
31 |     setApiKeyListLoading: (state, action: PayloadAction<boolean>) => {
32 |       state.loading = action.payload;
33 |     },
34 |     setApiKeyListError: (state, action: PayloadAction<string | null>) => {
35 |       state.error = action.payload;
36 |     },
37 |     setApiKeyListSuccess: (state, action: PayloadAction<boolean>) => {
38 |       state.success = action.payload;
39 |     },
40 | },
41 | });
42 |
43 | export const {
44 | setApiKeyList,
45 | setApiKeyListLoading,
46 | setApiKeyListError,
47 | setApiKeyListSuccess,
48 | } = apiKeyListSlice.actions;
49 |
50 | export default apiKeyListSlice.reducer;
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/apiKeyListSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'
2 |
3 | import { listApiKeys } from './asyncActions'
4 | import { setApiKeyList, setApiKeyListError, setApiKeyListSuccess } from './slice'
5 |
6 | export const apiKeyListThunk = createAppAsyncThunk(
7 | 'apiKey/apiKeyList',
8 | async (_, { dispatch }) => {
9 | try {
10 | const response = await listApiKeys()
11 | if (response) {
12 | dispatch(setApiKeyListSuccess(true))
13 | dispatch(setApiKeyList(response))
14 | } else {
15 | console.log('Failed to list API Keys.', response)
16 | dispatch(setApiKeyListError('Failed to list API Keys.'))
17 | }
18 | return response
19 | } catch (error: unknown) {
20 | console.log('Failed to list API Keys.', error)
21 | if (error instanceof Error) {
22 | dispatch(setApiKeyListError(error.message))
23 | } else {
24 | dispatch(setApiKeyListError('Failed to list API Keys.'))
25 | }
26 | return false
27 | }
28 | }
29 | )
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentAddSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentAddSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectExperimentAddName = (state: ReduxState) => state.experimentAdd.name
4 | export const selectExperimentAddModel = (state: ReduxState) => state.experimentAdd.model
5 | export const selectExperimentAddKwargs = (state: ReduxState) => state.experimentAdd.kwargs
6 | export const selectExperimentAddLoading = (state: ReduxState) => state.experimentAdd.loading
7 | export const selectExperimentAddError = (state: ReduxState) => state.experimentAdd.error
8 | export const selectExperimentAddID = (state: ReduxState) => state.experimentAdd.ID
9 | export const selectExperimentAddSuccess = (state: ReduxState) => state.experimentAdd.success
10 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentDetailSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentDetailSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectExperimentDetail = (state: ReduxState) => state.experimentDetail.experiment
4 | export const selectExperimentDetailLoading = (state: ReduxState) => state.experimentDetail.loading
5 | export const selectExperimentDetailSuccess = (state: ReduxState) => state.experimentDetail.success
6 | export const selectExperimentDetailError = (state: ReduxState) => state.experimentDetail.error
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentDetailSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from "@/lib/redux/createAppAsyncThunk";
2 |
3 | import { getExperiment, patchExperiment } from "./asyncActions";
4 | import { setExperimentDetail, setExperimentDetailError, setExperimentDetailLoading, setExperimentDetailSuccess } from "./slice";
5 |
6 | export const experimentDetailThunk = createAppAsyncThunk("experiment/experimentDetail", async (experimentID: string, { dispatch }) => {
7 | try {
8 | dispatch(setExperimentDetailLoading(true));
9 | const responseData = await getExperiment(experimentID);
10 | dispatch(setExperimentDetailSuccess(true));
11 | dispatch(setExperimentDetail(responseData));
12 | dispatch(setExperimentDetailLoading(false));
13 | return responseData;
14 | } catch (error: unknown) {
15 | console.log("Failed to get Experiment.", error);
16 | if (error instanceof Error) {
17 | dispatch(setExperimentDetailError(error.message));
18 | } else {
19 | dispatch(setExperimentDetailError("Failed to get Experiment."));
20 | }
21 | dispatch(setExperimentDetailLoading(false));
22 | return false;
23 | }
24 | });
25 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentListSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth";
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export const listExperiments = async (walletAddress: string): Promise<any> => {
5 | let authToken;
6 | try {
7 | authToken = await getAccessToken()
8 | } catch (error) {
9 | console.log('Failed to get access token: ', error)
10 | throw new Error("Authentication failed");
11 | }
12 |
13 | const requestUrl = `${backendUrl()}/experiments?walletAddress=${encodeURIComponent(walletAddress)}`;
14 | const requestOptions = {
15 | method: 'GET',
16 | headers: {
17 | 'Authorization': `Bearer ${authToken}`,
18 | 'Content-Type': 'application/json',
19 | },
20 | };
21 | const response = await fetch(requestUrl, requestOptions);
22 |
23 |   if (!response.ok) {
24 |     let errorText = "Failed to list Experiments";
25 |     try {
26 |       const errorResult = await response.json();
27 |       errorText = errorResult.message || errorText;
28 |     } catch (e) {
29 |       // Parsing JSON failed, retain the default error message.
30 |     }
31 |     throw new Error(errorText);
32 |   }
33 |
34 |   const result = await response.json();
35 |   return result;
36 | };
37 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentListSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentListSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectExperimentList = (state: ReduxState) => state.experimentList.experiments
4 | export const selectExperimentListLoading = (state: ReduxState) => state.experimentList.loading
5 | export const selectExperimentListSuccess = (state: ReduxState) => state.experimentList.success
6 | export const selectExperimentListError = (state: ReduxState) => state.experimentList.error
7 | export const selectCategorizedExperiments = (state: ReduxState) => state.experimentList.categorizedExperiments
8 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentNamesSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | // redux/experimentNames/asyncActions.ts
2 | import { getAccessToken } from "@privy-io/react-auth";
3 | import backendUrl from "lib/backendUrl";
4 |
5 | export const listExperimentNames = async (walletAddress: string): Promise<any> => {
6 | let authToken;
7 | try {
8 | authToken = await getAccessToken();
9 | } catch (error) {
10 | console.log('Failed to get access token: ', error);
11 | throw new Error("Authentication failed");
12 | }
13 |
14 | const requestUrl = `${backendUrl()}/experiments?fields=name&walletAddress=${encodeURIComponent(walletAddress)}`;
15 | const requestOptions = {
16 | method: 'GET',
17 | headers: {
18 | 'Authorization': `Bearer ${authToken}`,
19 | 'Content-Type': 'application/json',
20 | },
21 | };
22 | const response = await fetch(requestUrl, requestOptions);
23 |
24 |   if (!response.ok) {
25 |     let errorText = "Failed to list Experiments";
26 |     try {
27 |       const errorResult = await response.json();
28 |       errorText = errorResult.message || errorText;
29 |     } catch (e) {
30 |       // Parsing JSON failed, retain the default error message.
31 |     }
32 |     throw new Error(errorText);
33 |   }
34 |
35 |   const result = await response.json();
36 |   return result;
37 | };
38 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentNamesSlice/index.ts:
--------------------------------------------------------------------------------
1 | // redux/experimentNames/index.ts
2 | export * from './selectors';
3 | export * from './slice';
4 | export * from './thunks';
5 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentNamesSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | // redux/experimentNames/selectors.ts
2 | import type { ReduxState } from '@/lib/redux'; // Correct the path as needed
3 |
4 | export const selectExperimentNames = (state: ReduxState) => state.experimentNames.names;
5 | export const selectExperimentNamesLoading = (state: ReduxState) => state.experimentNames.loading;
6 | export const selectExperimentNamesError = (state: ReduxState) => state.experimentNames.error;
7 | export const selectExperimentNamesSuccess = (state: ReduxState) => state.experimentNames.success;
8 | export const selectCategorizedExperimentNames = (state: ReduxState) => state.experimentNames.categorizedExperimentNames;
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentUpdateSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth";
2 | import backendUrl from "lib/backendUrl";
3 |
4 | export const updateExperiment = async (experimentId: string, data: { name?: string; public?: boolean; }): Promise<any> => {
5 | let authToken;
6 | try {
7 | authToken = await getAccessToken()
8 | } catch (error) {
9 | console.log('Failed to get access token: ', error)
10 | throw new Error("Authentication failed");
11 | }
12 |
13 | const requestUrl = `${backendUrl()}/experiments/${experimentId}`;
14 | const requestOptions = {
15 | method: 'PUT',
16 | headers: {
17 | 'Authorization': `Bearer ${authToken}`,
18 | 'Content-Type': 'application/json',
19 | },
20 | body: JSON.stringify(data)
21 | };
22 |
23 | try {
24 | const response = await fetch(requestUrl, requestOptions);
25 | if (!response.ok) {
26 | throw new Error(`Failed to update Experiment: ${response.statusText}`);
27 | }
28 | return await response.json();
29 | } catch (error) {
30 | console.error('Failed to update Experiment:', error);
31 | throw new Error('Failed to update Experiment');
32 | }
33 | };
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentUpdateSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentUpdateSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectExperimentUpdateLoading = (state: ReduxState) => state.experimentUpdate.loading;
4 | export const selectExperimentUpdateError = (state: ReduxState) => state.experimentUpdate.error;
5 | export const selectExperimentUpdateSuccess = (state: ReduxState) => state.experimentUpdate.success;
6 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentUpdateSlice/slice.ts:
--------------------------------------------------------------------------------
1 | import { createSlice, PayloadAction } from "@reduxjs/toolkit";
2 |
3 | export interface ExperimentUpdateSliceState {
4 | loading: boolean;
5 | error: string | null;
6 | success: boolean;
7 | }
8 |
9 | const initialState: ExperimentUpdateSliceState = {
10 | loading: false,
11 | error: null,
12 | success: false,
13 | };
14 |
15 | export const experimentUpdateSlice = createSlice({
16 | name: "ExperimentUpdate",
17 | initialState,
18 | reducers: {
19 | setExperimentUpdateLoading: (state, action: PayloadAction) => {
20 | state.loading = action.payload;
21 | },
22 | setExperimentUpdateError: (state, action: PayloadAction) => {
23 | state.error = action.payload;
24 | },
25 | setExperimentUpdateSuccess: (state, action: PayloadAction) => {
26 | state.success = action.payload;
27 | },
28 | },
29 | });
30 |
31 | export const {
32 | setExperimentUpdateLoading,
33 | setExperimentUpdateError,
34 | setExperimentUpdateSuccess,
35 | } = experimentUpdateSlice.actions;
36 |
37 | export default experimentUpdateSlice.reducer;
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/experimentUpdateSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from 'lib/redux/createAppAsyncThunk';
2 | import { AppDispatch } from 'lib/redux/store';
3 |
4 | import { updateExperiment } from './asyncActions';
5 | import { setExperimentUpdateError, setExperimentUpdateLoading, setExperimentUpdateSuccess } from './slice';
6 |
7 | interface UpdateExperimentArgs {
8 | experimentId: string;
9 | updates: {
10 | name?: string;
11 | public?: boolean;
12 | };
13 | }
14 |
15 | export const experimentUpdateThunk = createAppAsyncThunk(
16 | 'experiment/updateExperiment',
17 | async ({ experimentId, updates }: UpdateExperimentArgs, { dispatch }: { dispatch: AppDispatch }) => {
18 | dispatch(setExperimentUpdateLoading(true));
19 | try {
20 | const result = await updateExperiment(experimentId, updates);
21 | dispatch(setExperimentUpdateSuccess(true));
22 | return result;
23 | } catch (error) {
24 | dispatch(setExperimentUpdateError(error instanceof Error ? error.toString() : 'Failed to update Experiment.'));
25 | return { error: error instanceof Error ? error.toString() : 'Failed to update Experiment.' };
26 | } finally {
27 | dispatch(setExperimentUpdateLoading(false));
28 | }
29 | }
30 | );
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileAddSlice/actions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth"
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export const saveFileToServer = async (
5 | file: File,
6 | metadata: { [key: string]: any },
7 | isPublic: boolean
8 | ): Promise<{ filename: string, id: string }> => {
9 | const formData = new FormData()
10 | formData.append('file', file, file.name)
11 | formData.append('filename', file.name)
12 | formData.append('public', (isPublic ?? false).toString())
13 |
14 | for (const key in metadata) {
15 | formData.append(key, metadata[key])
16 | }
17 |
18 | let authToken
19 | try {
20 | authToken = await getAccessToken();
21 | } catch (error) {
22 | console.log("Failed to get access token: ", error)
23 | throw new Error("Authentication failed")
24 | }
25 |
26 | const response = await fetch(`${backendUrl()}/files`, {
27 | method: 'POST',
28 | headers: {
29 | 'Authorization': `Bearer ${authToken}`,
30 | },
31 | body: formData,
32 | })
33 |
34 | if (!response.ok) {
35 | let errorMsg = 'An error occurred while uploading the file'
36 | try {
37 | const errorResult = await response.json()
38 | errorMsg = errorResult.message || errorMsg;
39 | } catch (e) {
40 | // Parsing JSON failed, retain the default error message.
41 | }
42 | console.log('errorMsg', errorMsg)
43 | throw new Error(errorMsg)
44 | }
45 |
46 | const result = await response.json()
47 | console.log('result', result)
48 | return result
49 | }
50 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileAddSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './dataSlice'
2 | export * from './selectors'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileAddSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from "@/lib/redux";
2 |
3 | export const selectFilename = (state: ReduxState) => state.fileAdd.filename;
4 | export const selectID = (state: ReduxState) => state.fileAdd.id;
5 | export const selectFileError = (state: ReduxState) => state.fileAdd.error;
6 | export const selectFileIsLoading = (state: ReduxState) => state.fileAdd.isLoading;
7 | export const selectDateFileIsUploaded = (state: ReduxState) => state.fileAdd.isUploaded;
8 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileAddSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from "@/lib/redux/createAppAsyncThunk";
2 |
3 | import { saveFileToServer } from "./actions";
4 | import { setIdDataSlice, setFileError, setFilenameDataSlice } from "./dataSlice";
5 |
6 | interface FilePayload {
7 | file: File;
8 | metadata: { [key: string]: any };
9 | isPublic: boolean;
10 | handleSuccess: (id: string) => void;
11 | }
12 |
13 | export const saveFileAsync = createAppAsyncThunk(
14 | "file/saveFile",
15 | async ({ file, metadata, isPublic, handleSuccess }: FilePayload, { dispatch }) => {
16 | try {
17 | const response = await saveFileToServer(file, metadata, isPublic);
18 | if (response.id) {
19 | handleSuccess(response.id);
20 | } else {
21 | dispatch(setFileError("Failed to save file."));
22 | }
23 | return response;
24 | } catch (error: unknown) {
25 | const errorMessage =
26 | typeof error === "object" && error !== null && "message" in error
27 | ? (error as { message?: string }).message
28 | : "An error occurred while saving file.";
29 |
30 | dispatch(setFileError(errorMessage || "An error occurred while saving file."));
31 | }
32 | }
33 | );
34 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileListSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth"
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export const listFiles = async ({ page = 1, pageSize = 50, filters = {} }: { page?: number, pageSize?: number, filters?: Record<string, string> }): Promise<any> => {
5 | const queryParams = new URLSearchParams({ ...filters, page: page.toString(), pageSize: pageSize.toString() });
6 | let authToken;
7 | try {
8 | authToken = await getAccessToken()
9 | } catch (error) {
10 | console.log('Failed to get access token: ', error)
11 | throw new Error("Authentication failed");
12 | }
13 |
14 | const response = await fetch(`${backendUrl()}/files?${queryParams}`, {
15 | method: 'GET',
16 | headers: {
17 | 'Authorization': `Bearer ${authToken}`,
18 | 'Content-Type': 'application/json',
19 | },
20 | })
21 |
22 | if (!response.ok) {
23 | throw new Error("Failed to list Files");
24 | }
25 |
26 | return await response.json();
27 | }
28 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileListSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileListSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectFileList = (state: ReduxState) => state.fileList.files
4 | export const selectFileListPagination = (state: ReduxState) => state.fileList.pagination
5 | export const selectFileListLoading = (state: ReduxState) => state.fileList.status === 'loading';
6 | export const selectFileListSuccess = (state: ReduxState) => state.fileList.success
7 | export const selectFileListError = (state: ReduxState) => state.fileList.error
8 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/fileListSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'
2 |
3 | import { listFiles } from './asyncActions'
4 | import {
5 | setFileList,
6 | setFileListError,
7 | setFileListPagination,
8 | setFileListSuccess
9 | } from './slice'
10 |
11 | export const fileListThunk = createAppAsyncThunk(
12 | 'files/listFiles',
13 | async (arg: Partial<{ page: number, pageSize: number, filters: Record<string, string> }> = { page: 1, pageSize: 50, filters: {} }, { dispatch }) => {
14 | const { page = 1, pageSize = 50, filters = {} } = arg;
15 | try {
16 | const response = await listFiles({ page, pageSize, filters });
17 | if (response) {
18 | dispatch(setFileListSuccess(true));
19 | dispatch(setFileList(response.data));
20 | dispatch(setFileListPagination(response.pagination));
21 | } else {
22 | console.log('Failed to list Files.', response);
23 | dispatch(setFileListError('Failed to list Files.'));
24 | }
25 | return response;
26 | } catch (error: unknown) {
27 | console.log('Failed to list Files.', error);
28 | if (error instanceof Error) {
29 | dispatch(setFileListError(error.message));
30 | } else {
31 | dispatch(setFileListError('Failed to list Files.'));
32 | }
33 | return false;
34 | }
35 | }
36 | )
37 |
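A short sketch of calling the thunk with explicit paging and a filter follows; the `filename` filter key is an assumption about the backend's query parameters, which are not documented in this dump.

```
// Hypothetical sketch: load page 2 of the file list, filtered by filename.
// The "filename" query key is an assumed backend parameter.
import { fileListThunk, selectFileList, useDispatch, useSelector } from "@/lib/redux";

export function useFilteredFiles(filename: string) {
  const dispatch = useDispatch();
  const files = useSelector(selectFileList);

  const load = () =>
    dispatch(fileListThunk({ page: 2, pageSize: 50, filters: { filename } }));

  return { files, load };
}
```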
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/index.ts:
--------------------------------------------------------------------------------
1 | export * from "./apiKeyAddSlice";
2 | export * from "./apiKeyListSlice";
3 | export * from "./fileAddSlice";
4 | export * from "./fileListSlice";
5 | export * from "./experimentAddSlice";
6 | export * from "./experimentDetailSlice";
7 | export * from "./experimentListSlice";
8 | export * from "./experimentUpdateSlice";
9 | export * from "./jobDetailSlice";
10 | export * from "./stripeCheckoutSlice";
11 | export * from "./modelAddSlice";
12 | export * from "./modelDetailSlice";
13 | export * from "./modelListSlice";
14 | export * from "./transactionsSummarySlice";
15 | export * from "./userSlice";
16 | export * from "./experimentNamesSlice";
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/jobDetailSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth";
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export const getJob = async (jobID: string): Promise<any> => {
5 | let authToken;
6 | try {
7 | authToken = await getAccessToken()
8 | } catch (error) {
9 | console.log('Failed to get access token: ', error)
10 | throw new Error("Authentication failed");
11 | }
12 |
13 | const response = await fetch(`${backendUrl()}/jobs/${jobID}`, {
14 | method: 'GET',
15 | headers: {
16 | 'Authorization': `Bearer ${authToken}`,
17 | 'Content-Type': 'application/json',
18 | },
19 | })
20 |
21 | if (!response.ok) {
22 | throw new Error(`Failed to get job: ${response.status} ${response.statusText}`);
23 | }
24 |
25 | const result = await response.json()
26 | return result
27 | }
28 |
29 | export const patchJob = async (jobID: string): Promise<any> => {
30 | let authToken;
31 | try {
32 | authToken = await getAccessToken()
33 | } catch (error) {
34 | console.log('Failed to get access token: ', error)
35 | throw new Error("Authentication failed");
36 | }
37 |
38 | const response = await fetch(`${backendUrl()}/jobs/${jobID}`, {
39 | method: 'PATCH',
40 | headers: {
41 | 'Authorization': `Bearer ${authToken}`,
42 | 'Content-Type': 'application/json',
43 | },
44 | })
45 |
46 | if (!response.ok) {
47 | throw new Error(`Failed to patch job: ${response.status} ${response.statusText}`);
48 | }
49 |
50 | const result = await response.json()
51 | return result
52 | }
53 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/jobDetailSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/jobDetailSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectJobDetail = (state: ReduxState) => state.jobDetail.job
4 | export const selectJobDetailLoading = (state: ReduxState) => state.jobDetail.loading
5 | export const selectJobDetailSuccess = (state: ReduxState) => state.jobDetail.success
6 | export const selectJobDetailError = (state: ReduxState) => state.jobDetail.error
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/jobDetailSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'
2 |
3 | import { getJob, patchJob } from './asyncActions'
4 | import { setJobDetail, setJobDetailError, setJobDetailLoading, setJobDetailSuccess } from './slice'
5 |
6 |
7 | export const jobDetailThunk = createAppAsyncThunk(
8 | 'job/jobDetail',
9 | async (jobID: string, { dispatch }) => {
10 | dispatch(setJobDetailLoading(true))
11 | try {
12 | const responseData = await getJob(jobID)
13 | dispatch(setJobDetailSuccess(true))
14 | dispatch(setJobDetail(responseData))
15 | dispatch(setJobDetailLoading(false))
16 | return responseData
17 | } catch (error: unknown) {
18 | console.log('Failed to get Job.', error)
19 | if (error instanceof Error) {
20 | dispatch(setJobDetailError(error.message))
21 | } else {
22 | dispatch(setJobDetailError('Failed to get Job.'))
23 | }
24 | dispatch(setJobDetailLoading(false))
25 | return false
26 | }
27 | }
28 | )
29 |
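Because the thunk resolves to `false` on failure instead of rejecting, a caller can poll it directly. A sketch follows; the job's `state` field and its terminal values are assumptions about the backend's payload, not something defined in this slice.

```
// Hypothetical polling sketch built on jobDetailThunk. The `state` field and
// the "completed"/"failed" values are assumptions about the API response.
import { jobDetailThunk, reduxStore } from "@/lib/redux";

export async function waitForJob(jobID: string, intervalMs = 5000) {
  for (;;) {
    const job = await reduxStore.dispatch(jobDetailThunk(jobID)).unwrap();
    if (job === false) throw new Error("failed to fetch job " + jobID);
    if (job.state === "completed" || job.state === "failed") return job;
    await new Promise((resolve) => setTimeout(resolve, intervalMs));
  }
}
```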
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelAddSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth"
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export const createModel = async (
5 | payload: { modelJson: { [key: string]: any } }
6 | ): Promise<any> => {
7 | let authToken
8 | try {
9 | authToken = await getAccessToken();
10 | } catch (error) {
11 | console.log("Failed to get access token: ", error)
12 | throw new Error("Authentication failed")
13 | }
14 |
15 | const response = await fetch(`${backendUrl()}/models`, {
16 | method: 'POST',
17 | headers: {
18 | 'Authorization': `Bearer ${authToken}`,
19 | 'Content-Type': 'application/json',
20 | },
21 | body: JSON.stringify(payload),
22 | })
23 |
24 | if (!response.ok) {
25 | throw new Error("Failed to create Model")
26 | }
27 |
28 | const result = await response.json()
29 | return result;
30 | }
31 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelAddSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './thunks'
3 | export * from './slice'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelAddSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectAddModelJson = (state: ReduxState) => state.modelAdd.modelJson
4 | export const selectAddModelError = (state: ReduxState) => state.modelAdd.error
5 | export const selectAddModelLoading = (state: ReduxState) => state.modelAdd.loading
6 | export const selectAddModelSuccess = (state: ReduxState) => state.modelAdd.success
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelAddSlice/slice.ts:
--------------------------------------------------------------------------------
1 | import { createSlice, PayloadAction } from '@reduxjs/toolkit'
2 |
3 | interface ModelAddSliceState {
4 | modelJson: string
5 | loading: boolean
6 | error: string | null
7 | success: boolean
8 | }
9 |
10 | const initialState: ModelAddSliceState = {
11 | modelJson: '',
12 | loading: false,
13 | error: null,
14 | success: false,
15 | }
16 |
17 | export const modelAddSlice = createSlice({
18 | name: 'modelAdd',
19 | initialState,
20 | reducers: {
21 | setAddModelJson: (state, action: PayloadAction<string>) => {
22 | state.modelJson = action.payload
23 | },
24 | setAddModelError: (state, action: PayloadAction<string>) => {
25 | state.error = action.payload
26 | },
27 | setAddModelLoading: (state, action: PayloadAction<boolean>) => {
28 | state.loading = action.payload
29 | },
30 | setAddModelSuccess: (state, action: PayloadAction<boolean>) => {
31 | state.success = action.payload
32 | }
33 | }
34 | })
35 |
36 | export const {
37 | setAddModelJson,
38 | setAddModelError,
39 | setAddModelLoading,
40 | setAddModelSuccess,
41 | } = modelAddSlice.actions
42 |
43 | export default modelAddSlice.reducer
44 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelAddSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'
2 |
3 | import { createModel } from './asyncActions'
4 | import { setAddModelError, setAddModelSuccess } from './slice'
5 |
6 | interface ModelPayload {
7 | modelJson: { [key: string]: any }
8 | }
9 |
10 | export const createModelThunk = createAppAsyncThunk(
11 | 'model/addModel',
12 | async ({ modelJson }: ModelPayload, { dispatch }) => {
13 | try {
14 | const response = await createModel({ modelJson })
15 | if (response && response.id) {
16 | dispatch(setAddModelSuccess(true))
17 | } else {
18 | console.log('Failed to add model.', response)
19 | dispatch(setAddModelError('Failed to add model.'))
20 | }
21 | return response
22 | } catch (error: unknown) {
23 | console.log('Failed to add model.', error)
24 | if (error instanceof Error) {
25 | dispatch(setAddModelError(error.message))
26 | } else {
27 | dispatch(setAddModelError('Failed to add model.'))
28 | }
29 | return false
30 | }
31 | }
32 | )
33 |
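A sketch of registering a model through the thunk; every field inside the manifest is invented, since the real model schema is defined server-side and not shown in this dump.

```
// Hypothetical sketch: dispatch createModelThunk with a manifest object.
// All keys inside modelJson are illustrative assumptions.
import { createModelThunk, useDispatch } from "@/lib/redux";

export function useRegisterModel() {
  const dispatch = useDispatch();

  return () =>
    dispatch(
      createModelThunk({
        modelJson: {
          name: "example-model",
          description: "illustrative manifest only",
        },
      })
    );
}
```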
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelDetailSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth"
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export const getModel = async (ID: string): Promise<any> => {
5 | let authToken
6 | try {
7 | authToken = await getAccessToken();
8 | } catch (error) {
9 | console.log("Failed to get access token: ", error)
10 | throw new Error("Authentication failed")
11 | }
12 |
13 | const response = await fetch(`${backendUrl()}/models/${ID}`, {
14 | method: "Get",
15 | headers: {
16 | "Authorization": `Bearer ${authToken}`,
17 | "Content-Type": "application/json",
18 | },
19 | })
20 |
21 | if (!response.ok) {
22 | throw new Error(`Failed to get model: ${response.status} ${response.statusText}`);
23 | }
24 |
25 | const result = await response.json();
26 | return result;
27 | }
28 |
29 | export const patchModel = async (ID: string): Promise<any> => {
30 | const response = await fetch(`${backendUrl()}/models/${ID}`, {
31 | method: "PATCH",
32 | headers: {
33 | "Content-Type": "application/json",
34 | },
35 | })
36 |
37 | if (!response.ok) {
38 | throw new Error(`Failed to patch model: ${response.status} ${response.statusText}`);
39 | }
40 |
41 | const result = await response.json();
42 | return result;
43 | }
44 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelDetailSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelDetailSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from "@/lib/redux";
2 |
3 | export const selectModelDetail = (state: ReduxState) => state.modelDetail.model;
4 | export const selectModelDetailLoading = (state: ReduxState) => state.modelDetail.loading;
5 | export const selectModelDetailSuccess = (state: ReduxState) => state.modelDetail.success;
6 | export const selectModelDetailError = (state: ReduxState) => state.modelDetail.error;
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelListSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth";
2 | import backendUrl from "lib/backendUrl"
3 |
4 | export const listModels = async (taskSlug?: string): Promise<any> => {
5 | const url = taskSlug ? `${backendUrl()}/models?taskCategory=${encodeURIComponent(taskSlug)}` : `${backendUrl()}/models`;
6 | const authToken = await getAccessToken()
7 | const response = await fetch(url, {
8 | method: 'GET',
9 | headers: {
10 | 'Authorization': `Bearer ${authToken}`,
11 | 'Content-Type': 'application/json',
12 | },
13 | })
14 |
15 | if (!response.ok) {
16 | throw new Error("Failed to list Models")
17 | }
18 |
19 | const result = await response.json()
20 | return result;
21 | }
22 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelListSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './slice'
3 | export * from './thunks'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelListSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from '@/lib/redux'
2 |
3 | export const selectModelList = (state: ReduxState) => state.modelList.models
4 | export const selectModelListLoading = (state: ReduxState) => state.modelList.loading
5 | export const selectModelListSuccess = (state: ReduxState) => state.modelList.success
6 | export const selectModelListError = (state: ReduxState) => state.modelList.error
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelListSlice/slice.ts:
--------------------------------------------------------------------------------
1 | import { createSlice, PayloadAction } from "@reduxjs/toolkit";
2 |
3 | import { ModelDetail } from "@/lib/redux";
4 |
5 | interface ModelListSliceState {
6 | models: ModelDetail[];
7 | loading: boolean;
8 | error: string | null;
9 | success: boolean;
10 | }
11 |
12 | const initialState: ModelListSliceState = {
13 | models: [],
14 | loading: false,
15 | error: null,
16 | success: false,
17 | };
18 |
19 | export const modelListSlice = createSlice({
20 | name: "modelList",
21 | initialState,
22 | reducers: {
23 | setModelList: (state, action: PayloadAction<ModelDetail[]>) => {
24 | state.models = action.payload;
25 | },
26 | setModelListLoading: (state, action: PayloadAction<boolean>) => {
27 | state.loading = action.payload;
28 | },
29 | setModelListError: (state, action: PayloadAction<string>) => {
30 | state.error = action.payload;
31 | },
32 | setModelListSuccess: (state, action: PayloadAction<boolean>) => {
33 | state.success = action.payload;
34 | },
35 | resetModelList: () => {
36 | return initialState;
37 | },
38 | },
39 | });
40 |
41 | export const { setModelList, setModelListLoading, setModelListError, setModelListSuccess, resetModelList } = modelListSlice.actions;
42 |
43 | export default modelListSlice.reducer;
44 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/modelListSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'
2 |
3 | import { AppDispatch } from '../..'
4 | import { listModels } from './asyncActions'
5 | import { setModelList, setModelListError, setModelListSuccess } from './slice'
6 |
7 |
8 | export const modelListThunk = createAppAsyncThunk(
9 | 'model/listModels',
10 | async (taskSlug: string | undefined, { dispatch }) => {
11 | console.log('modelListThunk')
12 | try {
13 | const response = await listModels(taskSlug)
14 | if (response) {
15 | dispatch(setModelListSuccess(true))
16 | dispatch(setModelList(response))
17 | } else {
18 | console.log('Failed to list Models.', response)
19 | dispatch(setModelListError('Failed to list Models.'))
20 | }
21 | return response
22 | } catch (error: unknown) {
23 | console.log('Failed to list Models.', error)
24 | if (error instanceof Error) {
25 | dispatch(setModelListError(error.message))
26 | } else {
27 | dispatch(setModelListError('Failed to list Models.'))
28 | }
29 | return false
30 | }
31 | }
32 | )
33 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/stripeCheckoutSlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth";
2 | import backendUrl from "lib/backendUrl";
3 |
4 | interface CheckoutPayload {
5 | modelId: string;
6 | scatteringMethod: string;
7 | kwargs: string;
8 | }
9 |
10 | export const getCheckoutURL = async (): Promise<any> => {
11 | let authToken;
12 | try {
13 | authToken = await getAccessToken();
14 | } catch (error) {
15 | console.log("Failed to get access token: ", error);
16 | throw new Error("Authentication failed");
17 | }
18 |
19 | const response = await fetch(`${backendUrl()}/stripe/checkout`, {
20 | method: "POST",
21 | headers: {
22 | Authorization: `Bearer ${authToken}`,
23 | "Content-Type": "application/json",
24 | },
25 | body: JSON.stringify({
26 | success_url: `${window.location.origin}/subscription/manage`,
27 | cancel_url: `${window.location.origin}/checkout/cancel`,
28 | }),
29 | });
30 |
31 | if (!response.ok) {
32 | throw new Error(`Problem getting checkout URL: ${response.status} ${response.statusText}`);
33 | }
34 |
35 | const result = await response.json();
36 | return result;
37 | };
38 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/stripeCheckoutSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from "./selectors";
2 | export * from "./slice";
3 | export * from "./thunks";
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/stripeCheckoutSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from "@/lib/redux";
2 |
3 | export const selectStripeCheckoutUrl = (state: ReduxState) => state.stripeCheckout.url;
4 | export const selectStripeCheckoutLoading = (state: ReduxState) => state.stripeCheckout.loading;
5 | export const selectStripeCheckoutSuccess = (state: ReduxState) => state.stripeCheckout.success;
6 | export const selectStripeCheckoutError = (state: ReduxState) => state.stripeCheckout.error;
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/stripeCheckoutSlice/slice.ts:
--------------------------------------------------------------------------------
1 | import { createSlice, PayloadAction } from "@reduxjs/toolkit";
2 |
3 | export interface StripePayload {
4 | url: string | null;
5 | }
6 |
7 | export interface StripeCheckoutSliceState {
8 | url: string | null;
9 | loading: boolean;
10 | error: string | null;
11 | success: boolean;
12 | }
13 |
14 | const initialState: StripeCheckoutSliceState = {
15 | url: null,
16 | loading: false,
17 | error: null,
18 | success: false,
19 | };
20 |
21 | export const stripeCheckoutSlice = createSlice({
22 | name: "stripeCheckout",
23 | initialState,
24 | reducers: {
25 | setStripeCheckoutUrl: (state, action: PayloadAction<StripePayload | null>) => {
26 | state.url = action.payload?.url || null;
27 | },
28 | setStripeCheckoutLoading: (state, action: PayloadAction<boolean>) => {
29 | state.loading = action.payload;
30 | },
31 | setStripeCheckoutError: (state, action: PayloadAction<string | null>) => {
32 | state.error = action.payload;
33 | },
34 | setStripeCheckoutSuccess: (state, action: PayloadAction<boolean>) => {
35 | state.success = action.payload;
36 | },
37 | },
38 | });
39 |
40 | export const { setStripeCheckoutUrl, setStripeCheckoutLoading, setStripeCheckoutError, setStripeCheckoutSuccess } = stripeCheckoutSlice.actions;
41 |
42 | export default stripeCheckoutSlice.reducer;
43 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/stripeCheckoutSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from "@/lib/redux/createAppAsyncThunk";
2 |
3 | import { getCheckoutURL } from "./asyncActions";
4 | import { setStripeCheckoutError, setStripeCheckoutLoading, setStripeCheckoutSuccess, setStripeCheckoutUrl } from "./slice";
5 |
6 | export const stripeCheckoutThunk = createAppAsyncThunk("stripe/checkout", async (_, { dispatch }) => {
7 | dispatch(setStripeCheckoutError(null));
8 | dispatch(setStripeCheckoutLoading(true));
9 | try {
10 | // const responseData = await getCheckoutURL();
11 | // dispatch(setStripeCheckoutSuccess(true));
12 | // dispatch(setStripeCheckoutUrl(responseData));
13 | // dispatch(setStripeCheckoutLoading(false));
14 | // return responseData;
15 | } catch (error: unknown) {
16 | console.log("Problem getting checkout URL", error);
17 | if (error instanceof Error) {
18 | dispatch(setStripeCheckoutError(error.message));
19 | } else {
20 | dispatch(setStripeCheckoutError("Problem getting checkout URL"));
21 | }
22 | dispatch(setStripeCheckoutLoading(false));
23 | return false;
24 | }
25 | });
26 |
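The fetch-and-dispatch body above is currently commented out; if it were re-enabled, the usual pattern is to unwrap the thunk and send the browser to the returned URL. A sketch, assuming the resolved payload would match `StripePayload` from the slice:

```
// Hypothetical caller for stripeCheckoutThunk once its body is re-enabled.
// The cast reflects an assumption: the resolved value matches StripePayload.
import { reduxStore, stripeCheckoutThunk } from "@/lib/redux";

export async function startCheckout() {
  const payload = (await reduxStore
    .dispatch(stripeCheckoutThunk())
    .unwrap()) as { url?: string | null } | false | undefined;
  if (payload && payload.url) {
    window.location.assign(payload.url); // hand off to the hosted checkout page
  }
}
```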
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/transactionsSummarySlice/asyncActions.ts:
--------------------------------------------------------------------------------
1 | import { getAccessToken } from "@privy-io/react-auth";
2 | import backendUrl from "lib/backendUrl";
3 |
4 | export const getTransactionsSummary = async (): Promise<any> => {
5 | let authToken;
6 | try {
7 | authToken = await getAccessToken();
8 | } catch (error) {
9 | console.log("Failed to get access token: ", error);
10 | throw new Error("Authentication failed");
11 | }
12 |
13 | const response = await fetch(`${backendUrl()}/transactions-summary`, {
14 | method: "Get",
15 | headers: {
16 | Authorization: `Bearer ${authToken}`,
17 | "Content-Type": "application/json",
18 | },
19 | });
20 |
21 | if (!response.ok) {
22 | throw new Error(`Problem fetching transactions: ${response.status} ${response.statusText}`);
23 | }
24 |
25 | const result = await response.json();
26 | return result;
27 | };
28 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/transactionsSummarySlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from "./selectors";
2 | export * from "./slice";
3 | export * from "./thunks";
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/transactionsSummarySlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from "@/lib/redux";
2 |
3 | export const selectTransactionsSummary = (state: ReduxState) => state.transactionsSummary.summary;
4 | export const selectTransactionsSummaryLoading = (state: ReduxState) => state.transactionsSummary.loading;
5 | export const selectTransactionsSummarySuccess = (state: ReduxState) => state.transactionsSummary.success;
6 | export const selectTransactionsSummaryError = (state: ReduxState) => state.transactionsSummary.error;
7 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/transactionsSummarySlice/slice.ts:
--------------------------------------------------------------------------------
1 | import { createSlice, PayloadAction } from "@reduxjs/toolkit";
2 |
3 | export interface TransactionsSummary {
4 | tokens: number | null;
5 | balance: number | null;
6 | }
7 |
8 | export interface TransactionsSummarySliceState {
9 | summary: TransactionsSummary;
10 | loading: boolean;
11 | error: string | null;
12 | success: boolean;
13 | }
14 |
15 | const initialState: TransactionsSummarySliceState = {
16 | summary: {
17 | tokens: null,
18 | balance: null,
19 | },
20 | loading: false,
21 | error: null,
22 | success: false,
23 | };
24 |
25 | export const transactionsSummarySlice = createSlice({
26 | name: "jobDetail",
27 | initialState,
28 | reducers: {
29 | setTransactionsSummary: (state, action: PayloadAction<TransactionsSummary>) => {
30 | state.summary = action.payload;
31 | },
32 | setTransactionsSummaryLoading: (state, action: PayloadAction<boolean>) => {
33 | state.loading = action.payload;
34 | },
35 | setTransactionsSummaryError: (state, action: PayloadAction<string | null>) => {
36 | state.error = action.payload;
37 | },
38 | setTransactionsSummarySuccess: (state, action: PayloadAction<boolean>) => {
39 | state.success = action.payload;
40 | },
41 | },
42 | });
43 |
44 | export const { setTransactionsSummary, setTransactionsSummaryLoading, setTransactionsSummaryError, setTransactionsSummarySuccess } =
45 | transactionsSummarySlice.actions;
46 |
47 | export default transactionsSummarySlice.reducer;
48 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/transactionsSummarySlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from "@/lib/redux/createAppAsyncThunk";
2 |
3 | import { getTransactionsSummary } from "./asyncActions";
4 | import { setTransactionsSummary, setTransactionsSummaryError, setTransactionsSummaryLoading, setTransactionsSummarySuccess } from "./slice";
5 |
6 | export const transactionsSummaryThunk = createAppAsyncThunk("transactions/summary", async (_, { dispatch }) => {
7 | dispatch(setTransactionsSummaryError(null));
8 | dispatch(setTransactionsSummaryLoading(true));
9 | try {
10 | const responseData = await getTransactionsSummary();
11 | dispatch(setTransactionsSummarySuccess(true));
12 | dispatch(setTransactionsSummary(responseData));
13 | dispatch(setTransactionsSummaryLoading(false));
14 | return responseData;
15 | } catch (error: unknown) {
16 | console.log("Problem getting checkout URL", error);
17 | if (error instanceof Error) {
18 | dispatch(setTransactionsSummaryError(error.message));
19 | } else {
20 | dispatch(setTransactionsSummaryError("Problem getting checkout URL"));
21 | }
22 | dispatch(setTransactionsSummaryLoading(false));
23 | return false;
24 | }
25 | });
26 |
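The summary pairs naturally with the `formatCurrency` helper from `/frontend/lib/utils.ts` (shown further below). A sketch, assuming `balance` is a USD amount, which the slice itself does not specify:

```
// Hypothetical sketch: format the fetched balance for display. Assumes USD;
// the slice only stores balance as number | null.
import { selectTransactionsSummary, useSelector } from "@/lib/redux";
import { formatCurrency } from "@/lib/utils";

export function useBalanceLabel(): string {
  const summary = useSelector(selectTransactionsSummary);
  return summary.balance === null ? "n/a" : formatCurrency(summary.balance);
}
```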
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/userSlice/index.ts:
--------------------------------------------------------------------------------
1 | export * from './selectors'
2 | export * from './thunks'
3 | export * from './userSlice'
4 |
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/userSlice/selectors.ts:
--------------------------------------------------------------------------------
1 | import type { ReduxState } from "@/lib/redux";
2 |
3 | export const selectUserError = (state: ReduxState) => state.user.error;
4 | export const selectUserWalletAddress = (state: ReduxState) => state.user.walletAddress;
5 | export const selectUserDID = (state: ReduxState) => state.user.did;
6 | export const selectUserTier = (state: ReduxState) => state.user.tier;
7 | export const selectUserIsAdmin = (state: ReduxState) => state.user.isAdmin;
8 | export const selectUserSubscriptionStatus = (state: ReduxState) => state.user.subscriptionStatus;
9 | export const selectIsUserSubscribed = (state: ReduxState) => state.user.subscriptionStatus === 'active';
--------------------------------------------------------------------------------
/frontend/lib/redux/slices/userSlice/thunks.ts:
--------------------------------------------------------------------------------
1 | import { createAppAsyncThunk } from '@/lib/redux/createAppAsyncThunk'
2 |
3 | import { fetchUserData, saveUserDataToServer } from './actions'
4 |
5 | interface UserPayload {
6 | walletAddress: string,
7 | }
8 |
9 | export const saveUserAsync = createAppAsyncThunk(
10 | 'user/saveUserDataToServer',
11 | async ({ walletAddress }: UserPayload) => {
12 | const result = await saveUserDataToServer(walletAddress);
13 | return result;
14 | }
15 | )
16 |
17 | export const fetchUserDataAsync = createAppAsyncThunk(
18 | 'user/fetchUserData',
19 | async () => {
20 | const result = await fetchUserData();
21 | return result;
22 | }
23 | )
24 |
25 | export const refreshUserDataThunk = createAppAsyncThunk(
26 | 'user/refreshUserData',
27 | async (_, { dispatch }) => {
28 | try {
29 | await dispatch(fetchUserDataAsync()).unwrap();
30 | } catch (error) {
31 | console.error('Failed to refresh user data: ', error);
32 | throw error;
33 | }
34 | }
35 | )
--------------------------------------------------------------------------------
/frontend/lib/redux/store.ts:
--------------------------------------------------------------------------------
1 | /* Core */
2 | import { type Action, configureStore, type ThunkAction } from '@reduxjs/toolkit'
3 | import {
4 | type TypedUseSelectorHook,
5 | useDispatch as useReduxDispatch,
6 | useSelector as useReduxSelector,
7 | } from 'react-redux'
8 |
9 | import { middleware } from './middleware'
10 | /* Instruments */
11 | import { reducer } from './rootReducer'
12 |
13 | export const reduxStore = configureStore({
14 | reducer,
15 | middleware: (getDefaultMiddleware) => {
16 | return getDefaultMiddleware().concat(middleware)
17 | },
18 | })
19 | export const useDispatch = () => useReduxDispatch<ReduxDispatch>()
20 | export const useSelector: TypedUseSelectorHook<ReduxState> = useReduxSelector
21 |
22 | /* Types */
23 | export type AppDispatch = typeof reduxStore.dispatch
24 | export type ReduxStore = typeof reduxStore
25 | export type ReduxState = ReturnType<typeof reduxStore.getState>
26 | export type ReduxDispatch = typeof reduxStore.dispatch
27 | export type ReduxThunkAction<ReturnType = void> = ThunkAction<
28 | ReturnType,
29 | ReduxState,
30 | unknown,
31 | Action
32 | >
33 |
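The typed re-exports above are what let components read the store safely. A small sketch using state paths that appear in the selectors earlier in this dump:

```
// Hypothetical sketch: because useSelector is typed with ReduxState, these
// property accesses are checked at compile time.
import { useSelector } from "@/lib/redux/store";

export function useIsAnythingLoading(): boolean {
  return useSelector(
    (state) => state.fileList.status === "loading" || state.modelList.loading
  );
}
```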
--------------------------------------------------------------------------------
/frontend/lib/utils.ts:
--------------------------------------------------------------------------------
1 | import { type ClassValue, clsx } from "clsx";
2 | import { twMerge } from "tailwind-merge";
3 |
4 | export function cn(...inputs: ClassValue[]) {
5 | return twMerge(clsx(inputs));
6 | }
7 |
8 | export function formatCurrency(amount: number, currency = "USD") {
9 | return `${new Intl.NumberFormat("en-US", {
10 | style: "currency",
11 | currency,
12 | }).format(amount)} ${currency}`;
13 | }
14 |
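A usage note for the two helpers: `cn` resolves Tailwind class conflicts in favor of later arguments, and `formatCurrency` appends the ISO code after the symbol-formatted amount. The expected values in the comments follow from clsx/tailwind-merge and Intl.NumberFormat semantics.

```
// Usage sketch for cn() and formatCurrency().
import { cn, formatCurrency } from "@/lib/utils";

const classes = cn("px-2 py-1", false && "hidden", "px-4"); // "py-1 px-4"
const label = formatCurrency(1234.5); // "$1,234.50 USD"
```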
--------------------------------------------------------------------------------
/frontend/next.config.mjs:
--------------------------------------------------------------------------------
1 | /** @type {import('next').NextConfig} */
2 | const nextConfig = {
3 | reactStrictMode: true,
4 | // Molstar makes prod build fail with swcMinify enabled
5 | // https://github.com/molstar/molstar/issues/1046
6 | // https://github.com/vercel/next.js/issues/52373
7 | swcMinify: false,
8 | output: "standalone",
9 | async redirects() {
10 | return [
11 | {
12 | source: "/",
13 | destination: "/experiments/new/protein-binder-design",
14 | permanent: false,
15 | },
16 | {
17 | source: "/tasks/:path*",
18 | destination: "/experiments/new/:path*",
19 | permanent: false,
20 | },
21 | {
22 | source: "/experiments/new",
23 | destination: "/experiments/new/protein-binder-design",
24 | permanent: false,
25 | },
26 | {
27 | source: "/checkout/success",
28 | destination: "/experiments/new/protein-binder-design",
29 | permanent: false,
30 | },
31 | {
32 | source: "/checkout/cancel",
33 | destination: "/experiments/new/protein-binder-design",
34 | permanent: false,
35 | },
36 | ];
37 | },
38 | };
39 |
40 | export default nextConfig;
41 |
--------------------------------------------------------------------------------
/frontend/postcss.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | plugins: {
3 | tailwindcss: {},
4 | autoprefixer: {},
5 | },
6 | }
7 |
--------------------------------------------------------------------------------
/frontend/public/browserconfig.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="utf-8"?>
2 | <browserconfig>
3 | <msapplication>
4 | <tile>
5 | <square150x150logo src="/icons/mstile-150x150.png"/>
6 | <TileColor>#6bdbad</TileColor>
7 | </tile>
8 | </msapplication>
9 | </browserconfig>
10 |
--------------------------------------------------------------------------------
/frontend/public/icons/android-chrome-192x192.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/android-chrome-192x192.png
--------------------------------------------------------------------------------
/frontend/public/icons/android-chrome-512x512.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/android-chrome-512x512.png
--------------------------------------------------------------------------------
/frontend/public/icons/apple-touch-icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/apple-touch-icon.png
--------------------------------------------------------------------------------
/frontend/public/icons/favicon-16x16.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/favicon-16x16.png
--------------------------------------------------------------------------------
/frontend/public/icons/favicon-32x32.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/favicon-32x32.png
--------------------------------------------------------------------------------
/frontend/public/icons/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/favicon.ico
--------------------------------------------------------------------------------
/frontend/public/icons/mstile-144x144.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/mstile-144x144.png
--------------------------------------------------------------------------------
/frontend/public/icons/mstile-150x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/mstile-150x150.png
--------------------------------------------------------------------------------
/frontend/public/icons/mstile-310x150.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/mstile-310x150.png
--------------------------------------------------------------------------------
/frontend/public/icons/mstile-310x310.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/mstile-310x310.png
--------------------------------------------------------------------------------
/frontend/public/icons/mstile-70x70.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/icons/mstile-70x70.png
--------------------------------------------------------------------------------
/frontend/public/images/task-community-models.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/images/task-community-models.png
--------------------------------------------------------------------------------
/frontend/public/images/task-protein-binder-design.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/images/task-protein-binder-design.png
--------------------------------------------------------------------------------
/frontend/public/images/task-protein-docking.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/images/task-protein-docking.png
--------------------------------------------------------------------------------
/frontend/public/images/task-protein-folding.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/images/task-protein-folding.png
--------------------------------------------------------------------------------
/frontend/public/images/task-small-molecule-docking.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/frontend/public/images/task-small-molecule-docking.png
--------------------------------------------------------------------------------
/frontend/public/site.webmanifest:
--------------------------------------------------------------------------------
1 | {
2 | "name": "",
3 | "short_name": "",
4 | "icons": [
5 | {
6 | "src": "/icons/android-chrome-192x192.png",
7 | "sizes": "192x192",
8 | "type": "image/png"
9 | },
10 | {
11 | "src": "/icons/android-chrome-512x512.png",
12 | "sizes": "512x512",
13 | "type": "image/png"
14 | }
15 | ],
16 | "theme_color": "#ffffff",
17 | "background_color": "#ffffff",
18 | "display": "standalone"
19 | }
20 |
--------------------------------------------------------------------------------
/frontend/run.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | set -eoux pipefail
4 |
5 | echo "Dummping envs into file for nodejs"
6 |
7 | env > /app/.env.${NODE_ENV:-local}
8 |
9 | exec node server.js
10 |
--------------------------------------------------------------------------------
/frontend/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://json.schemastore.org/tsconfig",
3 | "compilerOptions": {
4 | "baseUrl": ".",
5 | "paths": { "@/*": ["./*"] },
6 | "target": "ESNext",
7 | "lib": ["DOM", "DOM.Iterable", "ESNext"],
8 | "allowJs": true,
9 | "skipLibCheck": true,
10 | "strict": true,
11 | "forceConsistentCasingInFileNames": true,
12 | "noEmit": true,
13 | "esModuleInterop": true,
14 | "module": "ESNEXT",
15 | "moduleResolution": "node",
16 | "resolveJsonModule": true,
17 | "isolatedModules": true,
18 | "jsx": "preserve",
19 | "incremental": true,
20 | "plugins": [{ "name": "next" }]
21 | },
22 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
23 | "exclude": ["node_modules"]
24 | }
25 |
--------------------------------------------------------------------------------
/gateway/README.md:
--------------------------------------------------------------------------------
1 | # Setup
2 |
3 | * Install [docker](https://docs.docker.com/engine/install/)
4 | * Define necessary env variables
5 | ```
6 | NEXT_PUBLIC_BACKEND_URL=http://localhost:8080
7 | FRONTEND_URL=http://localhost:3000
8 | POSTGRES_PASSWORD=MAKE_UP_SOMETHING_RANDOM
9 | POSTGRES_USER=labdao
10 | POSTGRES_DB=labdao
11 | POSTGRES_HOST=localhost
12 | ```
13 | * Recommended: Install [direnv](https://direnv.net/). With it installed, you can create a `.env` file with the above environment variables and have them set automagically when you enter the folder.
14 |
15 | # Start the database
16 |
17 | ```
18 | docker compose up -d
19 | ```
20 |
21 | Note: Newer Docker installations include Docker Compose; older installations required installing docker-compose separately and running `docker-compose up -d`.
22 |
23 | # Frontend start command
24 |
25 | from ./gateway/frontend
26 | ```
27 | npm run dev
28 | ```
29 |
30 | # Backend start command
31 |
32 | from ./gateway
33 | ```
34 | go run app.go
35 | ```
36 |
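The frontend files earlier in this dump import `backendUrl` from `lib/backendUrl`, which is not included here. A plausible minimal sketch, assuming it simply resolves the `NEXT_PUBLIC_BACKEND_URL` variable listed above:

```
// Hypothetical sketch of lib/backendUrl (the actual file is not in this dump):
// resolve the gateway address from the env var documented in this README.
export default function backendUrl(): string {
  return process.env.NEXT_PUBLIC_BACKEND_URL || "http://localhost:8080";
}
```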
--------------------------------------------------------------------------------
/gateway/handlers/healthcheck.go:
--------------------------------------------------------------------------------
1 | package handlers
2 |
3 | import (
4 | "fmt"
5 | "net/http"
6 | )
7 |
8 | func HealthCheckHandler() http.HandlerFunc {
9 | return func(w http.ResponseWriter, r *http.Request) {
10 | w.WriteHeader(http.StatusOK)
11 | fmt.Fprintf(w, "Healthy")
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/gateway/migrations/10_update_other_to_community_models.up.sql:
--------------------------------------------------------------------------------
1 | UPDATE tools
2 | SET task_category = 'community-models'
3 | WHERE task_category = 'other-models';
4 |
5 | ALTER TABLE tools
6 | ALTER COLUMN task_category SET DEFAULT 'community-models';
--------------------------------------------------------------------------------
/gateway/migrations/11_add_s3_info_to_jobs.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs
2 | DROP COLUMN job_uuid;
--------------------------------------------------------------------------------
/gateway/migrations/11_add_s3_info_to_jobs.up.sql:
--------------------------------------------------------------------------------
1 | -- change the file name later
2 | ALTER TABLE jobs
3 | ADD COLUMN job_uuid UUID;
--------------------------------------------------------------------------------
/gateway/migrations/12_populate_generated_datafile_wallet_address.up.sql:
--------------------------------------------------------------------------------
1 | WITH mismatched_wallets AS (
2 | SELECT df.cid, j.wallet_address
3 | FROM data_files df
4 | INNER JOIN job_output_files jof ON df.cid = jof.data_file_c_id
5 | INNER JOIN jobs j ON j.id = jof.job_id
6 | WHERE df.wallet_address != j.wallet_address
7 | and df.wallet_address = ''
8 | )
9 | UPDATE data_files df
10 | SET wallet_address = mw.wallet_address
11 | FROM mismatched_wallets mw
12 | WHERE df.cid = mw.cid;
--------------------------------------------------------------------------------
/gateway/migrations/13_add_user_admins.down.sql:
--------------------------------------------------------------------------------
1 | -- Down migration to remove the 'admin' column from the 'users' table
2 | ALTER TABLE users DROP COLUMN admin;
--------------------------------------------------------------------------------
/gateway/migrations/13_add_user_admins.up.sql:
--------------------------------------------------------------------------------
1 | -- Up migration to add the 'admin' column to the 'users' table
2 | ALTER TABLE users ADD COLUMN admin BOOLEAN DEFAULT false;
--------------------------------------------------------------------------------
/gateway/migrations/14_add_datafile_public_attribute.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE data_files
2 | DROP COLUMN public;
--------------------------------------------------------------------------------
/gateway/migrations/14_add_datafile_public_attribute.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE data_files
2 | ADD COLUMN public BOOLEAN NOT NULL DEFAULT FALSE;
--------------------------------------------------------------------------------
/gateway/migrations/15_create_user_data_file_table.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE user_datafiles (
2 | wallet_address varchar(42) NOT NULL,
3 | data_file_c_id varchar(255) NOT NULL,
4 | created_at timestamp DEFAULT CURRENT_TIMESTAMP,
5 | CONSTRAINT pk_user_datafiles PRIMARY KEY (wallet_address, data_file_c_id),
6 | CONSTRAINT fk_user_datafiles_wallet_address FOREIGN KEY (wallet_address) REFERENCES users(wallet_address),
7 | CONSTRAINT fk_user_datafiles_data_file FOREIGN KEY (data_file_c_id) REFERENCES data_files(cid)
8 | );
9 |
10 | INSERT INTO user_datafiles (wallet_address, data_file_c_id, created_at)
11 | SELECT wallet_address, cid, COALESCE(timestamp, CURRENT_TIMESTAMP)
12 | FROM data_files;
13 |
14 | -- below steps will be done as a separate migration after we test and make sure the above information has been copied over correctly
15 | -- ALTER TABLE data_files
16 | -- DROP COLUMN wallet_address,
17 | -- DROP COLUMN timestamp;
18 |
--------------------------------------------------------------------------------
/gateway/migrations/16_add_flow_public_attribute.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows
2 | DROP COLUMN public;
--------------------------------------------------------------------------------
/gateway/migrations/16_add_flow_public_attribute.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows
2 | ADD COLUMN public BOOLEAN NOT NULL DEFAULT FALSE;
--------------------------------------------------------------------------------
/gateway/migrations/17_add_job_public_attribute.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs
2 | DROP COLUMN public;
--------------------------------------------------------------------------------
/gateway/migrations/17_add_job_public_attribute.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs
2 | ADD COLUMN public BOOLEAN NOT NULL DEFAULT FALSE;
--------------------------------------------------------------------------------
/gateway/migrations/18_add_exp_uuid.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows
2 | DROP COLUMN flow_uuid;
--------------------------------------------------------------------------------
/gateway/migrations/18_add_exp_uuid.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows
2 | ADD COLUMN flow_uuid UUID;
--------------------------------------------------------------------------------
/gateway/migrations/19_add_flow_record_cid_attribute.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows DROP COLUMN record_cid;
--------------------------------------------------------------------------------
/gateway/migrations/19_add_flow_record_cid_attribute.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows ADD COLUMN record_cid VARCHAR(255);
--------------------------------------------------------------------------------
/gateway/migrations/1_initial_schema.down.sql:
--------------------------------------------------------------------------------
1 | -- Drop many-to-many relation tables
2 | DROP TABLE IF EXISTS job_inputs;
3 | DROP TABLE IF EXISTS job_outputs;
4 |
5 | -- Drop users table
6 | DROP TABLE IF EXISTS users;
7 |
8 | -- Drop tools table
9 | DROP TABLE IF EXISTS tools;
10 |
11 | -- Drop jobs table along with its indexes
12 | DROP INDEX IF EXISTS idx_jobs_tool_id;
13 | DROP INDEX IF EXISTS idx_jobs_flow_id;
14 | DROP TABLE IF EXISTS jobs;
15 |
16 | -- Drop flows table
17 | DROP TABLE IF EXISTS flows;
18 |
19 | -- Drop data_files table
20 | DROP TABLE IF EXISTS data_files;
21 |
--------------------------------------------------------------------------------
/gateway/migrations/20_add_job_retry_max_runtime.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs
2 | DROP COLUMN retry_count;
3 |
4 | ALTER TABLE tools
5 | DROP COLUMN max_running_time;
--------------------------------------------------------------------------------
/gateway/migrations/20_add_job_retry_max_runtime.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs
2 | ADD COLUMN retry_count INT DEFAULT 0;
3 |
4 | ALTER TABLE tools
5 | ADD COLUMN max_running_time INT DEFAULT 2700;
--------------------------------------------------------------------------------
/gateway/migrations/21_jobs_job_type.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs DROP COLUMN job_type;
2 | ALTER TABLE jobs RENAME COLUMN job_id TO bacalhau_job_id;
--------------------------------------------------------------------------------
/gateway/migrations/21_jobs_job_type.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs RENAME COLUMN bacalhau_job_id TO job_id;
2 | ALTER TABLE jobs ADD COLUMN job_type VARCHAR(255) NOT NULL DEFAULT 'bacalhau';
--------------------------------------------------------------------------------
/gateway/migrations/22_add_job_result_json_column.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs
2 | DROP COLUMN result_json;
--------------------------------------------------------------------------------
/gateway/migrations/22_add_job_result_json_column.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs
2 | ADD COLUMN result_json JSONB;
--------------------------------------------------------------------------------
/gateway/migrations/23_datafile_s3_location_columns.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE data_files DROP COLUMN s3_bucket;
2 | ALTER TABLE data_files DROP COLUMN s3_location;
--------------------------------------------------------------------------------
/gateway/migrations/23_datafile_s3_location_columns.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE data_files ADD COLUMN s3_bucket VARCHAR(255);
2 | ALTER TABLE data_files ADD COLUMN s3_location VARCHAR(255);
--------------------------------------------------------------------------------
/gateway/migrations/24_tool_ray_service_endpoint_rename.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE tools DROP COLUMN ray_service_endpoint;
--------------------------------------------------------------------------------
/gateway/migrations/24_tool_ray_service_endpoint_rename.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE tools ADD COLUMN ray_service_endpoint VARCHAR(255);
--------------------------------------------------------------------------------
/gateway/migrations/25_s3_uri_in_datafile_table.down.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE data_files ADD COLUMN s3_bucket VARCHAR(255);
4 | ALTER TABLE data_files ADD COLUMN s3_location VARCHAR(255);
5 |
6 | UPDATE data_files
7 | SET s3_bucket = SPLIT_PART(SUBSTRING(s3_uri FROM 6), '/', 1)
8 | WHERE s3_uri LIKE 's3://%/%';
9 |
10 | UPDATE data_files
11 | SET s3_location = SUBSTRING(s3_uri FROM LENGTH(SPLIT_PART(SUBSTRING(s3_uri FROM 6), '/', 1)) + 7)
12 | WHERE s3_uri LIKE 's3://%/%';
13 |
14 | ALTER TABLE data_files DROP COLUMN s3_uri;
15 |
16 | COMMIT;
17 |
--------------------------------------------------------------------------------
/gateway/migrations/25_s3_uri_in_datafile_table.up.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE data_files ADD COLUMN s3_uri VARCHAR(255);
4 |
5 | UPDATE data_files
6 | SET s3_uri = CONCAT(
7 | 's3://',
8 | s3_bucket,
9 | CASE
10 | WHEN LEFT(s3_location, 1) != '/' THEN '/'
11 | ELSE ''
12 | END,
13 | s3_location
14 | )
15 | WHERE s3_bucket IS NOT NULL AND s3_location IS NOT NULL;
16 |
17 | ALTER TABLE data_files DROP COLUMN s3_bucket;
18 | ALTER TABLE data_files DROP COLUMN s3_location;
19 |
20 | COMMIT;
21 |
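The up migration above concatenates `s3_bucket` and `s3_location` into one `s3_uri`, and the down migration reverses it with SPLIT_PART/SUBSTRING arithmetic. For anyone checking that arithmetic, an equivalent round-trip in TypeScript:

```
// Illustrative TypeScript equivalent of the SQL round-trip above.
function toS3Uri(bucket: string, location: string): string {
  // Mirrors the CONCAT in the up migration: insert "/" unless already present.
  return `s3://${bucket}${location.startsWith("/") ? "" : "/"}${location}`;
}

function fromS3Uri(uri: string): { bucket: string; location: string } {
  // Mirrors the down migration: drop "s3://", take the first path segment as
  // the bucket, and the remainder after its "/" as the location.
  const rest = uri.slice(5);
  const bucket = rest.split("/")[0];
  return { bucket, location: rest.slice(bucket.length + 1) };
}

// toS3Uri("my-bucket", "path/key") === "s3://my-bucket/path/key"
// fromS3Uri("s3://my-bucket/path/key") -> { bucket: "my-bucket", location: "path/key" }
```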
--------------------------------------------------------------------------------
/gateway/migrations/26_tool_s3_uri_column.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE tools DROP COLUMN s3_uri;
--------------------------------------------------------------------------------
/gateway/migrations/26_tool_s3_uri_column.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE tools ADD COLUMN s3_uri VARCHAR(255);
--------------------------------------------------------------------------------
/gateway/migrations/27_flow_drop_cid.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows ADD COLUMN cid VARCHAR(255);
--------------------------------------------------------------------------------
/gateway/migrations/27_flow_drop_cid.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE flows DROP COLUMN cid;
--------------------------------------------------------------------------------
/gateway/migrations/28_job_id_updates.up copy.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs RENAME COLUMN ray_job_id TO job_id;
2 | ALTER TABLE jobs ADD COLUMN job_uuid UUID;
--------------------------------------------------------------------------------
/gateway/migrations/28_job_id_updates.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs RENAME COLUMN job_id TO ray_job_id;
2 | ALTER TABLE jobs DROP COLUMN job_uuid;
--------------------------------------------------------------------------------
/gateway/migrations/29_add_tier_and_compute_tally.down.sql:
--------------------------------------------------------------------------------
1 | -- Remove the 'compute_tally' column from the 'users' table
2 | ALTER TABLE users DROP COLUMN compute_tally;
3 |
4 | -- Remove the 'tier' column from the 'users' table
5 | ALTER TABLE users DROP COLUMN tier;
--------------------------------------------------------------------------------
/gateway/migrations/29_add_tier_and_compute_tally.up.sql:
--------------------------------------------------------------------------------
1 | -- Add the 'tier' column to the 'users' table
2 | ALTER TABLE users ADD COLUMN tier INTEGER NOT NULL DEFAULT 0;
3 |
4 | -- Add the 'compute_tally' column to the 'users' table
5 | ALTER TABLE users ADD COLUMN compute_tally INTEGER NOT NULL DEFAULT 0;
--------------------------------------------------------------------------------
/gateway/migrations/2_add_tags_schema.down.sql:
--------------------------------------------------------------------------------
1 | -- Drop many-to-many relation table between data_files and tags
2 | DROP TABLE IF EXISTS datafile_tags;
3 |
4 | -- Drop tags table
5 | DROP TABLE IF EXISTS tags;
6 |
--------------------------------------------------------------------------------
/gateway/migrations/2_add_tags_schema.up.sql:
--------------------------------------------------------------------------------
1 | -- Create tags table
2 | CREATE TABLE tags (
3 | name VARCHAR(255) NOT NULL PRIMARY KEY,
4 | type VARCHAR(100) NOT NULL
5 | );
6 |
7 | -- Create many-to-many relation table between data_files and tags
8 | CREATE TABLE datafile_tags (
9 | data_file_c_id VARCHAR(255) NOT NULL,
10 | tag_name VARCHAR(255) NOT NULL,
11 | PRIMARY KEY (data_file_c_id, tag_name),
12 | FOREIGN KEY (data_file_c_id) REFERENCES data_files(cid),
13 | FOREIGN KEY (tag_name) REFERENCES tags(name)
14 | );
15 |
--------------------------------------------------------------------------------
/gateway/migrations/30_add_compute_cost.down.sql:
--------------------------------------------------------------------------------
1 | -- Remove the 'compute_cost' column from the 'tools' table
2 | ALTER TABLE tools DROP COLUMN compute_cost;
--------------------------------------------------------------------------------
/gateway/migrations/30_add_compute_cost.up.sql:
--------------------------------------------------------------------------------
1 | -- Add the 'compute_cost' column to the 'tools' table
2 | ALTER TABLE tools ADD COLUMN compute_cost INTEGER NOT NULL DEFAULT 0;
--------------------------------------------------------------------------------
/gateway/migrations/31_add_stripe_user_id.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE users DROP COLUMN stripe_user_id;
--------------------------------------------------------------------------------
/gateway/migrations/31_add_stripe_user_id.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE users ADD COLUMN stripe_user_id VARCHAR(255) UNIQUE;
--------------------------------------------------------------------------------
/gateway/migrations/32_flow_to_experiment_rename.down.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE experiments RENAME TO flows;
4 |
5 | ALTER TABLE flows RENAME COLUMN experiment_uuid TO flow_uuid;
6 |
7 | ALTER TABLE jobs RENAME COLUMN experiment_id TO flow_id;
8 |
9 | DROP INDEX idx_jobs_experiment_id;
10 | CREATE INDEX idx_jobs_flow_id ON jobs USING btree (flow_id);
11 |
12 | ALTER SEQUENCE experiments_id_seq RENAME TO flows_id_seq;
13 |
14 | ALTER TABLE flows RENAME CONSTRAINT experiments_pkey TO flows_pkey;
15 | ALTER TABLE jobs RENAME CONSTRAINT jobs_experimentid_fkey TO jobs_flowid_fkey;
16 |
17 | COMMIT;
--------------------------------------------------------------------------------
/gateway/migrations/32_flow_to_experiment_rename.up.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE flows RENAME TO experiments;
4 |
5 | ALTER TABLE experiments RENAME COLUMN flow_uuid TO experiment_uuid;
6 |
7 | ALTER TABLE jobs RENAME COLUMN flow_id TO experiment_id;
8 |
9 | DROP INDEX idx_jobs_flow_id;
10 | CREATE INDEX idx_jobs_experiment_id ON jobs USING btree (experiment_id);
11 |
12 | ALTER SEQUENCE flows_id_seq RENAME TO experiments_id_seq;
13 |
14 | ALTER TABLE experiments RENAME CONSTRAINT flows_pkey TO experiments_pkey;
15 | ALTER TABLE jobs RENAME CONSTRAINT jobs_flowid_fkey TO jobs_experimentid_fkey;
16 |
17 | COMMIT;
--------------------------------------------------------------------------------
/gateway/migrations/33_datafile_to_file_rename.down.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE files RENAME TO data_files;
4 | ALTER TABLE file_tags RENAME TO datafile_tags;
5 | ALTER TABLE user_files RENAME TO user_datafiles;
6 |
7 | ALTER TABLE datafile_tags RENAME file_c_id TO data_file_c_id;
8 | ALTER TABLE user_datafiles RENAME file_c_id TO data_file_c_id;
9 |
10 | ALTER TABLE job_input_files RENAME COLUMN file_c_id TO data_file_c_id;
11 | ALTER TABLE job_output_files RENAME COLUMN file_c_id TO data_file_c_id;
12 |
13 | ALTER TABLE data_files RENAME CONSTRAINT files_pkey TO data_files_pkey;
14 |
15 | ALTER TABLE datafile_tags RENAME CONSTRAINT file_tags_pkey TO datafile_tags_pkey;
16 | ALTER TABLE datafile_tags RENAME CONSTRAINT file_tags_file_c_id_fkey TO datafile_tags_data_file_c_id_fkey;
17 | ALTER TABLE datafile_tags RENAME CONSTRAINT file_tags_tag_name_fkey TO datafile_tags_tag_name_fkey;
18 |
19 | ALTER TABLE user_datafiles RENAME CONSTRAINT pk_user_files TO pk_user_datafiles;
20 | ALTER TABLE user_datafiles RENAME CONSTRAINT fk_user_files_file TO fk_user_datafiles_data_file;
21 | ALTER TABLE user_datafiles RENAME CONSTRAINT fk_user_files_wallet_address TO fk_user_datafiles_wallet_address;
22 |
23 | ALTER TABLE job_input_files RENAME CONSTRAINT job_input_files_file_c_id_fkey TO job_input_files_data_file_c_id_fkey;
24 | ALTER TABLE job_output_files RENAME CONSTRAINT job_output_files_file_c_id_fkey TO job_output_files_data_file_c_id_fkey;
25 |
26 | COMMIT;
--------------------------------------------------------------------------------
/gateway/migrations/33_datafile_to_file_rename.up.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE data_files RENAME TO files;
4 | ALTER TABLE datafile_tags RENAME TO file_tags;
5 | ALTER TABLE user_datafiles RENAME TO user_files;
6 |
7 | ALTER TABLE file_tags RENAME data_file_c_id TO file_c_id;
8 | ALTER TABLE user_files RENAME data_file_c_id TO file_c_id;
9 |
10 | ALTER TABLE job_input_files RENAME COLUMN data_file_c_id TO file_c_id;
11 | ALTER TABLE job_output_files RENAME COLUMN data_file_c_id TO file_c_id;
12 |
13 | ALTER TABLE files RENAME CONSTRAINT data_files_pkey TO files_pkey;
14 |
15 | ALTER TABLE file_tags RENAME CONSTRAINT datafile_tags_pkey TO file_tags_pkey;
16 | ALTER TABLE file_tags RENAME CONSTRAINT datafile_tags_data_file_c_id_fkey TO file_tags_file_c_id_fkey;
17 | ALTER TABLE file_tags RENAME CONSTRAINT datafile_tags_tag_name_fkey TO file_tags_tag_name_fkey;
18 |
19 | ALTER TABLE user_files RENAME CONSTRAINT pk_user_datafiles TO pk_user_files;
20 | ALTER TABLE user_files RENAME CONSTRAINT fk_user_datafiles_data_file TO fk_user_files_file;
21 | ALTER TABLE user_files RENAME CONSTRAINT fk_user_datafiles_wallet_address TO fk_user_files_wallet_address;
22 |
23 | ALTER TABLE job_input_files RENAME CONSTRAINT job_input_files_data_file_c_id_fkey TO job_input_files_file_c_id_fkey;
24 | ALTER TABLE job_output_files RENAME CONSTRAINT job_output_files_data_file_c_id_fkey TO job_output_files_file_c_id_fkey;
25 |
26 | COMMIT;
--------------------------------------------------------------------------------
/gateway/migrations/34_tool_to_model_rename.down.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE models RENAME TO tools;
4 |
5 | ALTER TABLE jobs RENAME COLUMN model_id TO tool_id;
6 | ALTER TABLE tools RENAME model_json TO tool_json;
7 | ALTER TABLE tools RENAME COLUMN default_model TO default_tool;
8 |
9 | DROP INDEX idx_jobs_model_id;
10 | CREATE INDEX idx_jobs_tool_id ON jobs USING btree (tool_id);
11 |
12 | ALTER TABLE tools RENAME CONSTRAINT models_name_key TO tools_name_key;
13 | ALTER TABLE tools RENAME CONSTRAINT models_pkey TO tools_pkey;
14 |
15 | COMMIT;
--------------------------------------------------------------------------------
/gateway/migrations/34_tool_to_model_rename.up.sql:
--------------------------------------------------------------------------------
1 | BEGIN;
2 |
3 | ALTER TABLE tools RENAME TO models;
4 |
5 | ALTER TABLE jobs RENAME COLUMN tool_id TO model_id;
6 | ALTER TABLE models RENAME tool_json TO model_json;
7 | ALTER TABLE models RENAME COLUMN default_tool TO default_model;
8 |
9 | DROP INDEX idx_jobs_tool_id;
10 | CREATE INDEX idx_jobs_model_id ON jobs USING btree (model_id);
11 |
12 | ALTER TABLE models RENAME CONSTRAINT tools_name_key TO models_name_key;
13 | ALTER TABLE models RENAME CONSTRAINT tools_pkey TO models_pkey;
14 |
15 | COMMIT;
--------------------------------------------------------------------------------
/gateway/migrations/36_user_subscriptions.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE users
2 | DROP COLUMN subscription_status,
3 | DROP COLUMN subscription_id;
--------------------------------------------------------------------------------
/gateway/migrations/36_user_subscriptions.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE users
2 | ADD COLUMN subscription_status VARCHAR(255),
3 | ADD COLUMN subscription_id VARCHAR(255) UNIQUE NULL;
--------------------------------------------------------------------------------
/gateway/migrations/37_filehash_nullable.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE files ALTER COLUMN file_hash SET NOT NULL;
--------------------------------------------------------------------------------
/gateway/migrations/37_filehash_nullable.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE files ALTER COLUMN file_hash DROP NOT NULL;
--------------------------------------------------------------------------------
/gateway/migrations/38_job_type_column.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs DROP COLUMN IF EXISTS job_type;
2 | ALTER TABLE experiments DROP COLUMN IF EXISTS experiment_uuid;
--------------------------------------------------------------------------------
/gateway/migrations/38_job_type_column.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE jobs ADD COLUMN IF NOT EXISTS job_type VARCHAR;
2 | ALTER TABLE experiments ADD COLUMN IF NOT EXISTS experiment_uuid VARCHAR;
--------------------------------------------------------------------------------
/gateway/migrations/39_inference_event_filename_column.down.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE inference_events DROP COLUMN IF EXISTS file_name;
--------------------------------------------------------------------------------
/gateway/migrations/39_inference_event_filename_column.up.sql:
--------------------------------------------------------------------------------
1 | ALTER TABLE inference_events ADD COLUMN IF NOT EXISTS file_name VARCHAR;
--------------------------------------------------------------------------------
/gateway/migrations/3_seed_tags_table.down.sql:
--------------------------------------------------------------------------------
1 | -- Remove seeded entries from tags table
2 | DELETE FROM tags WHERE name IN ('uploaded', 'generated');
3 |
--------------------------------------------------------------------------------
/gateway/migrations/3_seed_tags_table.up.sql:
--------------------------------------------------------------------------------
1 | -- Seed tags table with predefined values
2 | INSERT INTO tags (name, type) VALUES
3 | ('uploaded', 'autogenerated'),
4 | ('generated', 'autogenerated')
5 | ON CONFLICT (name) DO NOTHING; -- This prevents duplication if the migration is run multiple times
6 |
--------------------------------------------------------------------------------
/gateway/migrations/4_add_timestamps.down.sql:
--------------------------------------------------------------------------------
1 | -- Remove StartTime and EndTime columns from flows table
2 | ALTER TABLE flows
3 | DROP COLUMN IF EXISTS start_time,
4 | DROP COLUMN IF EXISTS end_time;
5 |
6 | -- Remove Timestamp column from tools table
7 | ALTER TABLE tools
8 | DROP COLUMN IF EXISTS timestamp;
9 |
10 | -- Remove CreatedAt column from users table
11 | ALTER TABLE users
12 | DROP COLUMN IF EXISTS created_at;
13 |
14 | -- Remove CreatedAt, StartedAt, CompletedAt columns from jobs table
15 | ALTER TABLE jobs
16 | DROP COLUMN IF EXISTS created_at,
17 | DROP COLUMN IF EXISTS started_at,
18 | DROP COLUMN IF EXISTS completed_at;
19 |
--------------------------------------------------------------------------------
/gateway/migrations/4_add_timestamps.up.sql:
--------------------------------------------------------------------------------
1 | -- Add StartTime and EndTime columns to flows table
2 | ALTER TABLE flows
3 | ADD COLUMN start_time TIMESTAMP,
4 | ADD COLUMN end_time TIMESTAMP;
5 |
6 | -- Add Timestamp column to tools table
7 | ALTER TABLE tools
8 | ADD COLUMN timestamp TIMESTAMP;
9 |
10 | -- Add CreatedAt column to users table
11 | ALTER TABLE users
12 | ADD COLUMN created_at TIMESTAMP;
13 |
14 | -- Add CreatedAt, StartedAt, CompletedAt columns to jobs table
15 | ALTER TABLE jobs
16 | ADD COLUMN created_at TIMESTAMP,
17 | ADD COLUMN started_at TIMESTAMP,
18 | ADD COLUMN completed_at TIMESTAMP;
19 |
--------------------------------------------------------------------------------
/gateway/migrations/6_add_tool_flags.down.sql:
--------------------------------------------------------------------------------
1 | -- This migration removes the display, task_category, and default_tool columns from the tools table.
2 | ALTER TABLE tools DROP COLUMN display;
3 | ALTER TABLE tools DROP COLUMN task_category;
4 | ALTER TABLE tools DROP COLUMN default_tool;
5 |
--------------------------------------------------------------------------------
/gateway/migrations/6_add_tool_flags.up.sql:
--------------------------------------------------------------------------------
1 | -- This migration adds the display, task_category, and default_tool flags to the tools table.
2 | ALTER TABLE tools ADD COLUMN display BOOLEAN DEFAULT true;
3 | ALTER TABLE tools ADD COLUMN task_category VARCHAR(255) DEFAULT 'other-models';
4 | ALTER TABLE tools ADD COLUMN default_tool BOOLEAN DEFAULT false;
--------------------------------------------------------------------------------
/gateway/migrations/7_add_api_keys_table.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS api_keys;
2 |
--------------------------------------------------------------------------------
/gateway/migrations/7_add_api_keys_table.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE api_keys (
2 | id SERIAL PRIMARY KEY,
3 | key VARCHAR(255) NOT NULL UNIQUE,
4 | scope VARCHAR(255) NOT NULL,
5 | created_at TIMESTAMP NOT NULL,
6 | expires_at TIMESTAMP NOT NULL,
7 | user_id VARCHAR(42) NOT NULL,
8 | CONSTRAINT fk_user FOREIGN KEY (user_id) REFERENCES users(wallet_address)
9 | );
10 |
--------------------------------------------------------------------------------
/gateway/migrations/8_add_api_keys_to_users.down.sql:
--------------------------------------------------------------------------------
1 | -- Remove the DID column from the users table
2 | ALTER TABLE users
3 | DROP COLUMN did;
4 |
5 | -- Remove the foreign key constraint from the api_keys table
6 | ALTER TABLE api_keys
7 | DROP CONSTRAINT fk_user;
--------------------------------------------------------------------------------
/gateway/migrations/8_add_api_keys_to_users.up.sql:
--------------------------------------------------------------------------------
1 | -- Add the DID column to the users table
2 | ALTER TABLE users
3 | ADD COLUMN did VARCHAR(255) UNIQUE;
4 |
5 | -- Ensure the foreign key constraint from the api_keys table to the users table is correct
6 | ALTER TABLE api_keys
7 | DROP CONSTRAINT IF EXISTS fk_user,
8 | ADD CONSTRAINT fk_user FOREIGN KEY (user_id) REFERENCES users(wallet_address);
--------------------------------------------------------------------------------
/gateway/migrations/9_add_transaction_table.down.sql:
--------------------------------------------------------------------------------
1 | DROP TABLE IF EXISTS transactions;
2 |
--------------------------------------------------------------------------------
/gateway/migrations/9_add_transaction_table.up.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE transactions (
2 | id VARCHAR(255) PRIMARY KEY,
3 | amount FLOAT NOT NULL,
4 | is_debit BOOLEAN NOT NULL,
5 | user_id VARCHAR(42) NOT NULL,
6 | description TEXT,
7 | created_at TIMESTAMP WITHOUT TIME ZONE DEFAULT (now() at time zone 'utc'),
8 | FOREIGN KEY (user_id) REFERENCES users(wallet_address) ON DELETE CASCADE
9 | );
10 |
11 |
--------------------------------------------------------------------------------
/gateway/models/design.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import "gorm.io/datatypes"
4 |
5 | type Design struct {
6 | ID int `json:"id"`
7 | JobID int `json:"job_id"`
8 | XAxisValue string `json:"x_axis_value"`
9 | YAxisValue string `json:"y_axis_value"`
10 | CheckpointPDBID int `json:"checkpoint_pdb_id"`
11 | 	AdditionalDetails datatypes.JSON `json:"additional_details"`
12 | }
13 |
--------------------------------------------------------------------------------
/gateway/models/experiment.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import "time"
4 |
5 | type Experiment struct {
6 | ID uint `gorm:"primaryKey;autoIncrement"`
7 | Jobs []Job `gorm:"foreignKey:ExperimentID"`
8 | Name string `gorm:"type:varchar(255);"`
9 | Public bool `gorm:"type:boolean;not null;default:false"`
10 | RecordCID string `gorm:"column:record_cid;type:varchar(255);"`
11 | WalletAddress string `gorm:"type:varchar(42);not null"`
12 | CreatedAt time.Time `gorm:"autoCreateTime"`
13 | ExperimentUUID string `gorm:"type:varchar(255);"`
14 | }
15 |
--------------------------------------------------------------------------------
/gateway/models/file.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import (
4 | "time"
5 | )
6 |
7 | type File struct {
8 | ID int `gorm:"primaryKey;autoIncrement"`
9 | FileHash string `gorm:"type:varchar(64)"`
10 | WalletAddress string `gorm:"type:varchar(42);not null"`
11 | Filename string `gorm:"type:varchar(255);not null"`
12 | InputFiles []Job `gorm:"many2many:job_input_files;foreignKey:ID;joinForeignKey:file_id;References:ID;JoinReferences:job_id"`
13 | 	OutputFiles   []Job     `gorm:"many2many:job_output_files;foreignKey:ID;joinForeignKey:file_id;joinReferences:job_id"`
14 | 	Tags          []Tag     `gorm:"many2many:file_tags;foreignKey:ID;joinForeignKey:file_id;joinReferences:tag_name"`
15 | 	Public        bool      `gorm:"type:boolean;not null;default:false"`
16 | 	UserFiles     []User    `gorm:"many2many:user_files;foreignKey:ID;joinForeignKey:file_id;joinReferences:wallet_address"`
17 | S3URI string `gorm:"type:varchar(255)"`
18 | CreatedAt time.Time `gorm:"autoCreateTime"`
19 | LastModifiedAt time.Time `gorm:"autoUpdateTime"`
20 | }
21 |
--------------------------------------------------------------------------------
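The many2many tags above encode the join tables created by migrations 2 and 33 (file_tags, user_files, job_input_files, job_output_files). A hypothetical consumer (the function name and setup below are invented for illustration and are not part of the repo) would let GORM walk the join table declared in those tags:

```go
package example

import (
	"github.com/labdao/plex/gateway/models"
	"gorm.io/gorm"
)

// loadFileWithTags eagerly loads a File together with its Tags; GORM joins
// through the file_tags table using the file_id and tag_name columns named
// in the struct tags above.
func loadFileWithTags(db *gorm.DB, id int) (models.File, error) {
	var f models.File
	err := db.Preload("Tags").First(&f, "id = ?", id).Error
	return f, err
}
```
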
/gateway/models/fileevent.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import "time"
4 |
5 | type FileEvent struct {
6 | ID int `json:"id"`
7 | FileID int `json:"file_id"`
8 | WalletAddress string `json:"wallet_address"`
9 | EventTime time.Time `json:"event_time"`
10 | EventType string `json:"event_type"`
11 | }
12 |
--------------------------------------------------------------------------------
/gateway/models/inferenceevent.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import (
4 | "time"
5 |
6 | "gorm.io/datatypes"
7 | )
8 |
9 | // EventType is restricted to the following string values
10 | const (
11 | EventTypeJobQueued = "job_queued"
12 | EventTypeJobProcessing = "job_processing"
13 | EventTypeJobPending = "job_pending"
14 | EventTypeJobRunning = "job_running"
15 | EventTypeJobStopped = "job_stopped"
16 | EventTypeJobSucceeded = "job_succeeded"
17 | EventTypeJobFailed = "job_failed"
18 | EventTypeFileProcessed = "file_processed"
19 | )
20 |
21 | // RetryCount relies on Go's zero value, so it defaults to 0
22 | type InferenceEvent struct {
23 | ID uint `gorm:"primaryKey;autoIncrement"`
24 | JobID uint `gorm:"not null"`
25 | Job Job `gorm:"foreignKey:JobID"`
26 | RayJobID string `gorm:"type:varchar(255);not null"`
27 | InputJson datatypes.JSON `gorm:"type:json"`
28 | OutputJson datatypes.JSON `gorm:"type:json"`
29 | RetryCount int `gorm:"not null"`
30 | JobStatus JobState `gorm:"type:varchar(255);default:'queued'"`
31 | ResponseCode int `gorm:"type:int"`
32 | EventTime time.Time `gorm:""`
33 | EventMessage string `gorm:"type:text"`
34 | EventType string `gorm:"type:varchar(255);not null"`
35 | FileName string `gorm:"type:varchar(255)"`
36 | }
37 |
--------------------------------------------------------------------------------
/gateway/models/key.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import "time"
4 |
5 | const (
6 | ScopeReadOnly = "read-only"
7 | ScopeReadWrite = "read-write"
8 | ScopeAdmin = "admin"
9 | )
10 |
11 | type APIKey struct {
12 | ID uint `gorm:"primaryKey;autoIncrement"`
13 | Key string `gorm:"type:varchar(255);not null;unique"`
14 | Scope string `gorm:"type:varchar(255);not null"`
15 | UserID string `gorm:"type:varchar(42);not null"`
16 | User User `gorm:"foreignKey:UserID"`
17 | CreatedAt time.Time `gorm:"autoCreateTime"`
18 | ExpiresAt time.Time `gorm:""`
19 | RevokedAt time.Time `gorm:""`
20 | }
21 |
--------------------------------------------------------------------------------
/gateway/models/model.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import (
4 | "time"
5 |
6 | "gorm.io/datatypes"
7 | )
8 |
9 | type JobType string
10 |
11 | const (
12 | JobTypeJob JobType = "job"
13 | JobTypeService JobType = "service"
14 | )
15 |
16 | type Model struct {
17 | ID int `gorm:"primaryKey;autoIncrement"`
18 | Name string `gorm:"type:text;not null;unique"`
19 | WalletAddress string `gorm:"type:varchar(42);not null"`
20 | ModelJson datatypes.JSON `gorm:"type:json"`
21 | CreatedAt time.Time `gorm:"autoCreateTime"`
22 | Display bool `gorm:"type:boolean;default:true"`
23 | TaskCategory string `gorm:"type:text;default:'community-models'"`
24 | DefaultModel bool `gorm:"type:boolean;default:false"`
25 | MaxRunningTime int `gorm:"type:int;default:2700"`
26 | ComputeCost int `gorm:"type:int;not null;default:0"`
27 | RayEndpoint string `gorm:"type:varchar(255)"`
28 | RayJobEntrypoint string `gorm:"type:varchar(255)"`
29 | S3URI string `gorm:"type:varchar(255)"`
30 | JobType JobType `gorm:"type:text;default:'job'"`
31 | }
32 |
--------------------------------------------------------------------------------
/gateway/models/organization.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | type Organization struct {
4 | ID uint `gorm:"primaryKey;autoIncrement"`
5 | Name string `gorm:"type:varchar(255);not null;unique"`
6 | Description string `gorm:"type:text"`
7 | }
8 |
--------------------------------------------------------------------------------
/gateway/models/rayjobresponse.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | type RayJobResponse struct {
4 | UUID string `json:"uuid"`
5 | PDB FileDetail `json:"pdb"`
6 | Scores map[string]float64 `json:"-"`
7 | Files map[string]FileDetail `json:"-"`
8 | Points int `json:"points"`
9 | }
10 |
11 | type FileDetail struct {
12 | URI string `json:"uri"`
13 | }
14 |
--------------------------------------------------------------------------------
/gateway/models/scatterplotdata.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | type ScatterPlotData struct {
4 | Plddt float64 `json:"plddt"`
5 | IPae float64 `json:"i_pae"`
6 | Checkpoint string `json:"checkpoint"`
7 | StructureFile string `json:"structureFile"`
8 | PdbFilePath string `json:"pdbFilePath"`
9 | RayJobID string `json:"rayJobId"`
10 | }
11 |
--------------------------------------------------------------------------------
/gateway/models/tag.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | type Tag struct {
4 | Name string `gorm:"primaryKey;type:varchar(255);not null;unique"`
5 | Type string `gorm:"type:varchar(100);not null"`
6 | 	Files []File `gorm:"many2many:file_tags;foreignKey:Name;joinForeignKey:tag_name;joinReferences:file_id"`
7 | }
8 | 
--------------------------------------------------------------------------------
/gateway/models/transaction.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import (
4 | "time"
5 | )
6 |
7 | type Transaction struct {
8 | ID string `gorm:"primaryKey;type:varchar(255)" json:"id"`
9 | Amount float64 `gorm:"type:float" json:"amount"`
10 | IsDebit bool `gorm:"type:boolean" json:"isDebit"`
11 | WalletAddress string `gorm:"type:varchar(42);not null" json:"walletAddress"`
12 | Description string `gorm:"type:text" json:"description"`
13 | CreatedAt time.Time `gorm:""`
14 | }
15 |
--------------------------------------------------------------------------------
/gateway/models/user.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import "time"
4 |
5 | type Tier int
6 |
7 | const (
8 | TierFree Tier = iota
9 | TierPaid
10 | )
11 |
12 | type User struct {
13 | WalletAddress string `gorm:"primaryKey;type:varchar(42);not null" json:"walletAddress"`
14 | DID string `gorm:"column:did;type:varchar(255);unique" json:"did"`
15 | CreatedAt time.Time `gorm:""`
16 | APIKeys []APIKey `gorm:"foreignKey:UserID"`
17 | Admin bool `gorm:"column:admin;default:false" json:"admin"`
18 | 	UserFiles          []File       `gorm:"many2many:user_files;foreignKey:WalletAddress;joinForeignKey:wallet_address;joinReferences:file_id"`
19 | Tier Tier `gorm:"type:int;not null;default:0" json:"tier"`
20 | ComputeTally int `gorm:"column:compute_tally;default:0" json:"computeTally"`
21 | StripeUserID string `gorm:"column:stripe_user_id;type:varchar(255)" json:"stripeUserId"`
22 | OrganizationID uint `gorm:"column:organization_id"`
23 | Organization Organization `gorm:"foreignKey:OrganizationID"`
24 | SubscriptionStatus string `gorm:"column:subscription_status;type:varchar(255)" json:"subscriptionStatus"`
25 | SubscriptionID *string `gorm:"column:subscription_id;type:varchar(255);unique" json:"subscriptionId"`
26 | }
27 |
--------------------------------------------------------------------------------
/gateway/models/userevent.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import "time"
4 |
5 | type UserEvent struct {
6 | ID int `json:"id"`
7 | WalletAddress string `json:"wallet_address"`
8 | ApiKeyID int `json:"api_key_id"`
9 | EventTime time.Time `json:"event_time"`
10 | EventType string `json:"event_type"`
11 | }
12 |
--------------------------------------------------------------------------------
/gateway/utils/api.go:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import (
4 | "crypto/rand"
5 | "encoding/base64"
6 | "fmt"
7 | "hash/crc32"
8 | "strings"
9 | "time"
10 |
11 | "github.com/labdao/plex/gateway/models"
12 | )
13 |
14 | // GenerateAPIKey returns a key of the form lab_<base64url>_<crc32 hex>.
15 | // Note: the label argument is currently unused.
16 | func GenerateAPIKey(length int, label string) (string, error) {
17 | 	bytes := make([]byte, length)
18 | 	if _, err := rand.Read(bytes); err != nil {
19 | 		return "", fmt.Errorf("error generating random bytes: %v", err)
20 | 	}
21 | 
22 | 	key := base64.URLEncoding.EncodeToString(bytes)
23 | 	checksum := crc32.ChecksumIEEE([]byte(key))
24 | 	keyWithChecksum := fmt.Sprintf("lab_%s_%x", key, checksum)
25 | 
26 | 	return keyWithChecksum, nil
27 | }
28 | 
29 | // useful for validation before DB lookup
30 | func ValidateAPIKey(key string) (bool, error) {
31 | 	// base64.URLEncoding output may itself contain '_', so the key can split
32 | 	// into more than three parts; only the "lab" prefix and the trailing
33 | 	// checksum segment are fixed.
34 | 	parts := strings.Split(key, "_")
35 | 	if len(parts) < 3 || parts[0] != "lab" {
36 | 		return false, fmt.Errorf("invalid API key format")
37 | 	}
38 | 
39 | 	encodedPart := strings.Join(parts[1:len(parts)-1], "_")
40 | 	providedChecksum := parts[len(parts)-1]
41 | 
42 | 	var checksum uint32
43 | 	_, err := fmt.Sscanf(providedChecksum, "%x", &checksum)
44 | 	if err != nil {
45 | 		return false, fmt.Errorf("error parsing checksum: %v", err)
46 | 	}
47 | 
48 | 	if _, err := base64.URLEncoding.DecodeString(encodedPart); err != nil {
49 | 		return false, fmt.Errorf("base64 decoding failed: %v", err)
50 | 	}
51 | 	// the checksum was computed over the encoded string, so verify it the same way
52 | 	computedChecksum := crc32.ChecksumIEEE([]byte(encodedPart))
53 | 
54 | 	return computedChecksum == checksum, nil
55 | }
56 | 
57 | func IsAPIKeyExpired(apiKey models.APIKey) bool {
58 | 	return apiKey.ExpiresAt.Before(time.Now().UTC())
59 | }
60 | 
--------------------------------------------------------------------------------
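Putting the two functions together: the expected key shape is `lab_<base64url>_<crc32 hex>`, and ValidateAPIKey is a cheap structural check performed before any database lookup. A minimal sketch of the round trip (standalone program, not part of the repo):

```go
package main

import (
	"fmt"

	"github.com/labdao/plex/gateway/utils"
)

func main() {
	// 32 random bytes -> ~43 base64url characters; the label argument is
	// currently unused by GenerateAPIKey
	key, err := utils.GenerateAPIKey(32, "demo")
	if err != nil {
		panic(err)
	}

	// checksum-only validation; the DB lookup for the key comes afterwards
	ok, err := utils.ValidateAPIKey(key)
	fmt.Println(key, ok, err) // ok should be true for a freshly generated key
}
```
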
/gateway/utils/stripe.go:
--------------------------------------------------------------------------------
1 | package utils
2 |
3 | import (
4 | "fmt"
5 | "os"
6 | "strconv"
7 | "time"
8 |
9 | "github.com/stripe/stripe-go/v78"
10 | "github.com/stripe/stripe-go/v78/billing/meterevent"
11 | )
12 |
13 | func setupStripeClient() error {
14 | apiKey := os.Getenv("STRIPE_SECRET_KEY")
15 | if apiKey == "" {
16 | return fmt.Errorf("STRIPE_SECRET_KEY environment variable not set")
17 | }
18 | stripe.Key = apiKey
19 | return nil
20 | }
21 |
22 | func RecordUsage(stripeCustomerID string, usage int64) error {
23 | err := setupStripeClient()
24 | if err != nil {
25 | return fmt.Errorf("failed to set up Stripe client: %v", err)
26 | }
27 |
28 | params := &stripe.BillingMeterEventParams{
29 | EventName: stripe.String("compute_units"),
30 | Payload: map[string]string{
31 | "value": strconv.FormatInt(usage, 10),
32 | "stripe_customer_id": stripeCustomerID,
33 | },
34 | Identifier: stripe.String(fmt.Sprintf("usage-%d", time.Now().Unix())),
35 | }
36 | _, err = meterevent.New(params)
37 | if err != nil {
38 | return fmt.Errorf("failed to record usage: %v", err)
39 | }
40 | return nil
41 | }
42 |
--------------------------------------------------------------------------------
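RecordUsage wraps a single Stripe billing meter event named `compute_units`, keyed by an identifier derived from the current Unix time. A hypothetical call site (the customer ID is invented for illustration; STRIPE_SECRET_KEY must be set in the environment):

```go
package main

import (
	"log"

	"github.com/labdao/plex/gateway/utils"
)

func main() {
	// report 42 compute units for a (made-up) Stripe customer after a job run
	if err := utils.RecordUsage("cus_123example", 42); err != nil {
		log.Fatalf("usage not recorded: %v", err)
	}
}
```

Since the identifier has one-second resolution, two usage events recorded within the same second would share an identifier; an identifier that also incorporates the customer ID or a UUID would avoid collisions.
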
/infrastructure/ansible/ansible.cfg:
--------------------------------------------------------------------------------
1 | [defaults]
2 | host_key_checking=false
3 | inventory = inventory.aws_ec2.yaml
4 | remote_user = ubuntu
5 | private_key_file = ~/.ssh/steward-dev.pem
6 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/bacalhau.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Bacalhau
3 |
4 | [Service]
5 | Restart=on-failure
6 | RestartSec=5s
7 | User=ubuntu
8 | Group=ubuntu
9 | ExecStart=bacalhau serve \
10 | --node-type compute,requester \
11 | --ipfs-connect {{ ipfs_connect }} \
12 | --private-internal-ipfs=false \
13 | --limit-total-gpu 1 \
14 | --limit-job-memory 12gb \
15 | --job-selection-accept-networked \
16 | --job-selection-data-locality anywhere \
17 | --job-selection-probe-http {{ receptor_url }} \
18 | --labels owner={{ owner }}
19 |
20 | [Install]
21 | WantedBy=multi-user.target
22 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/compute.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Bacalhau
3 |
4 | [Service]
5 | Environment="KEEP_STACK=true"
6 | Restart=on-failure
7 | RestartSec=5s
8 | User=ubuntu
9 | Group=ubuntu
10 | ExecStart=bacalhau serve \
11 | --node-type compute \
12 | --ipfs-connect {{ ipfs_connect }} \
13 | --private-internal-ipfs=false \
14 | --labels owner={{ owner }} \
15 | {% if ansible_ec2_instance_type is defined %}
16 | --labels instance-type={{ ansible_ec2_instance_type }} \
17 | {% endif %}
18 | {% if ansible_ec2_instance_id is defined %}
19 | --labels instance-id={{ ansible_ec2_instance_id }} \
20 | {% endif %}
21 | {% if num_of_gpus is defined and num_of_gpus | int > 0 %}
22 | --labels node-type=gpu \
23 | {% else %}
24 | --labels node-type=cpu \
25 | {% endif %}
26 | --job-selection-probe-http {{ receptor_url }} \
27 | --peer {{ requester_peer }} \
28 | --job-selection-accept-networked \
29 | --job-selection-data-locality anywhere
30 |
31 | [Install]
32 | WantedBy=multi-user.target
33 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/compute.yaml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | node:
4 | compute:
5 | capacity:
6 | queueresourcelimits:
7 | cpu: 0m
8 | disk: 0 B
9 | gpu: "0"
10 | memory: 0 B
11 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/environment.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | c.Spawner.environment = {
4 | 'PLEX_DIR': '{{ plex_dir }}',
5 | }
6 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/ipfs.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=IPFS daemon
3 |
4 | [Service]
5 | Restart=on-failure
6 | RestartSec=5s
7 | User=ubuntu
8 | Group=ubuntu
9 | ExecStart=ipfs daemon --routing=dhtclient
10 | Environment="IPFS_PATH={{ ipfs_path }}"
11 |
12 | [Install]
13 | WantedBy=multi-user.target
14 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/jupyter_notebook_config.py:
--------------------------------------------------------------------------------
1 | import os
2 | os.umask(0o002)
3 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/receptor-config:
--------------------------------------------------------------------------------
1 | PGHOST=terraform-20230620164721003000000001.cqbgzlch0qtq.us-east-1.rds.amazonaws.com
2 | PGUSER=receptor
3 | PGDATABASE=receptor
4 | PGSSLMODE=verify-ca
5 | PGSSLROOTCERT={{ receptor_dir }}/global-bundle.pem
6 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/receptor.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Receptor Web Service
3 |
4 | [Service]
5 | Restart=on-failure
6 | RestartSec=5s
7 | User=ubuntu
8 | Group=ubuntu
9 | WorkingDirectory={{ plex_dir }}/receptor
10 | ExecStart=go run main.go
11 | EnvironmentFile={{ receptor_dir }}/configs
12 | EnvironmentFile={{ receptor_dir }}/secrets
13 |
14 | [Install]
15 | WantedBy=multi-user.target
16 |
--------------------------------------------------------------------------------
/infrastructure/ansible/files/requester.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Bacalhau Requester
3 |
4 | [Service]
5 | Restart=on-failure
6 | RestartSec=5s
7 | User=ubuntu
8 | Group=ubuntu
9 | ExecStart=bacalhau serve \
10 | --node-type requester \
11 | --ipfs-connect {{ ipfs_connect }} \
12 | --private-internal-ipfs=false \
13 | --job-selection-probe-http {{ receptor_url }} \
14 | --labels owner={{ owner }} \
15 | --job-selection-accept-networked \
16 | --job-selection-data-locality anywhere \
17 | --peer none
18 |
19 | [Install]
20 | WantedBy=multi-user.target
21 |
--------------------------------------------------------------------------------
/infrastructure/ansible/install_requirements.yaml:
--------------------------------------------------------------------------------
1 | - name: Install requirements on the host
2 | remote_user: ubuntu
3 |   # Ability to override hosts, useful for running the playbook in local mode
4 | hosts: "{{ target_hosts | default('all') }}"
5 | tasks:
6 | - name: Install collections and roles together
7 | community.general.ansible_galaxy_install:
8 | type: both
9 | requirements_file: "{{ playbook_dir }}/requirements.yaml"
10 |
--------------------------------------------------------------------------------
/infrastructure/ansible/inventory.aws_ec2.yaml:
--------------------------------------------------------------------------------
1 | # inventory.aws_ec2.yaml
2 | plugin: amazon.aws.aws_ec2
3 |
4 | # This sets the region. If empty (the default) this will include all regions, except possibly
5 | # restricted ones like us-gov-west-1 and cn-north-1.
6 | regions:
7 | - us-east-1
8 |
9 | filters:
10 | instance-state-name: running
11 |
12 | keyed_groups:
13 | - prefix: tag
14 | key: tags
15 |
--------------------------------------------------------------------------------
/infrastructure/ansible/jupyter_set_users.yaml:
--------------------------------------------------------------------------------
1 | - name: Set Jupyter Users
2 | remote_user: ubuntu
3 | hosts: tag_Type_jupyter_notebook
4 | vars_files:
5 | - vars/teams.yaml
6 | tasks:
7 | - name: Create shared folder for teams
8 | become: true
9 | ansible.builtin.file:
10 | path: /srv/data/teams
11 | state: directory
12 |
13 | - name: Create admin users
14 | become: true
15 | ansible.builtin.command:
16 | cmd: tljh-config add-item users.admin {{ item }}
17 | loop: "{{ admins | default([])}}"
18 |
19 | - name: Create teams
20 | include_tasks: tasks/jupyter_team_setup_tasks.yaml
21 | loop: "{{ teams | default([])}}"
22 |
23 | - name: Reload the tljh configuration
24 | become: true
25 | ansible.builtin.command: tljh-config reload
26 |
--------------------------------------------------------------------------------
/infrastructure/ansible/provision_ipfs.yaml:
--------------------------------------------------------------------------------
1 | - name: Provision IPFS
2 | remote_user: ubuntu
3 | hosts: "{{ target_hosts | default('tag_Type_ipfs') }}"
4 | vars:
5 | ipfs_version: "0.18.0"
6 | ipfs_path: "/opt/ipfs"
7 | ipfs_connect: /ip4/127.0.0.1/tcp/5001
8 | tasks:
9 | # Must provide limit flag to ensure running against current environment
10 | - fail:
11 | msg: "you must use -l or --limit"
12 | when: ansible_limit is not defined
13 | run_once: true
14 |
15 | - name: Run common tasks
16 | ansible.builtin.include_tasks: tasks/common_tasks.yaml
17 |
18 | # Docker
19 | - name: Install Docker
20 | ansible.builtin.include_tasks: tasks/install_docker_tasks.yaml
21 |
22 | - name: Install IPFS
23 | ansible.builtin.include_tasks: tasks/install_ipfs_docker_tasks.yaml
24 |
25 | - name: Systemd Daemon Reload
26 | become: true
27 | ansible.builtin.systemd:
28 | daemon_reload: true
29 |
30 | handlers:
31 | - name: Restart IPFS
32 | become: true
33 | ansible.builtin.service:
34 | name: ipfs
35 | state: restarted
36 | enabled: true
37 |
--------------------------------------------------------------------------------
/infrastructure/ansible/provision_requester.yaml:
--------------------------------------------------------------------------------
1 | - name: Provision Bacalhau Requester
2 | remote_user: ubuntu
3 | hosts: "{{ target_hosts | default('tag_Type_requester') }}"
4 | vars:
5 | ipfs_version: "0.18.0"
6 | ipfs_path: "/opt/ipfs"
7 | bacalhau_version: "v1.2.0"
8 | bacalhau_node_type: "requester"
9 | ipfs_connect: /ip4/127.0.0.1/tcp/5001
10 | owner: labdao
11 | tasks:
12 | # Must provide limit flag to ensure running against current environment
13 | - fail:
14 | msg: "you must use -l or --limit"
15 | when: ansible_limit is not defined
16 | run_once: true
17 |
18 | - name: Run common tasks
19 | ansible.builtin.include_tasks: tasks/common_tasks.yaml
20 |
21 | # - name: Install IPFS
22 | # ansible.builtin.include_tasks: tasks/install_ipfs_tasks.yaml
23 |
24 | # Run Bacalhau agent
25 | - name: Install Bacalhau
26 | ansible.builtin.include_tasks: tasks/install_bacalhau_tasks.yaml
27 | tags: bacalhau
28 |
29 | - name: Systemd Daemon Reload
30 | become: true
31 | ansible.builtin.systemd:
32 | daemon_reload: true
33 |
34 | handlers:
35 | - name: Restart Bacalhau
36 | become: true
37 | ansible.builtin.service:
38 | name: requester
39 | state: restarted
40 | enabled: true
41 |
--------------------------------------------------------------------------------
/infrastructure/ansible/requirements.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | roles:
3 |
4 | collections:
5 | - name: community.general
6 | - name: community.docker
7 | - name: amazon.aws
8 | - name: community.aws
9 |
--------------------------------------------------------------------------------
/infrastructure/ansible/tasks/common_tasks.yaml:
--------------------------------------------------------------------------------
1 | - name: "Gather service facts"
2 | become: true
3 | ansible.builtin.service_facts:
4 |
5 | - name: "Disable unattended upgrades service"
6 | become: true
7 | ansible.builtin.systemd:
8 | name: "unattended-upgrades.service"
9 | enabled: false
10 | masked: true
11 | state: "stopped"
12 | when: ansible_facts.services["unattended-upgrades.service"] is defined
13 |
14 | - name: "Remove unattended upgrades"
15 | become: true
16 | ansible.builtin.package:
17 | name: "unattended-upgrades"
18 | state: absent
19 |
20 | - name: Install required system packages
21 | become: true
22 | ansible.builtin.apt:
23 | pkg:
24 | - aptitude
25 | - ca-certificates
26 | - curl
27 | - git
28 | - gnupg
29 | - gzip
30 | - lsb-release
31 | - pip
32 | - tar
33 | - unzip
34 | - jq
35 | - tree
36 | - net-tools
37 |
38 | - name: Install required pip packages
39 | become: true
40 | ansible.builtin.pip:
41 | name:
42 | - pip
43 | - boto3
44 | - pyopenssl
45 |
--------------------------------------------------------------------------------
/infrastructure/ansible/tasks/install_plex_tasks.yaml:
--------------------------------------------------------------------------------
1 | # {{ plex_dir }} must be writable by the user
2 | - name: Install required pip packages
3 | become: true
4 | ansible.builtin.pip:
5 | name:
6 | - github3.py
7 |
8 | - name: Print plex_version if provided
9 | ansible.builtin.debug:
10 | msg: "{{ plex_version }}"
11 | when: plex_version is defined
12 |
13 | - name: Get the latest plex release
14 | community.general.github_release:
15 | repo: plex
16 | user: labdao
17 | action: latest_release
18 | register: latest_release
19 |
20 | - name: Download the latest plex binary
21 | ansible.builtin.get_url:
22 | # https://github.com/labdao/plex/releases/download/v0.7.0/plex_0.7.0_linux_amd64.tar.gz
23 |     url: https://github.com/labdao/plex/releases/download/{{ latest_release.tag }}/plex_{{ latest_release.tag | regex_replace('^v([0-9]+\.[0-9]+\.[0-9]+)$', '\1') }}_linux_amd64.tar.gz
24 | dest: /tmp/plex_release.tar.gz
25 | force: true
26 |
27 | - name: Unzip the plex release
28 | ansible.builtin.unarchive:
29 | src: /tmp/plex_release.tar.gz
30 | remote_src: true
31 | include: plex
32 | dest: "{{ plex_dir }}"
33 |
34 | - name: Link the plex binary to the system path
35 | become: true
36 | ansible.builtin.file:
37 | src: "{{ plex_dir }}/plex"
38 | dest: /usr/local/bin/plex
39 | state: link
40 |
--------------------------------------------------------------------------------
/infrastructure/ansible/tasks/pull_common_containers.yaml:
--------------------------------------------------------------------------------
1 | ---
2 |
3 | - name: Wait for docker to run
4 | become: true
5 | ansible.builtin.command: docker pull alpine
6 | register: docker_pull
7 | retries: 5
8 | delay: 3
9 | until: docker_pull.rc == 0
10 |
11 | - name: Pull commonly used containers
12 | become: true
13 | community.docker.docker_image:
14 | name: "{{ item }}"
15 | source: pull
16 | loop:
17 | - "ubuntu"
18 | - "alpine"
19 | - "docker.io/library/python:3.9-slim"
20 | - "docker.io/rapidsai/mambaforge-cuda:cuda11.8.0-base-ubuntu22.04-py3.9"
21 | - "docker.io/pytorch/pytorch:2.1.0-cuda11.8-cudnn8-runtime"
22 | - "ghcr.io/labdao/equibind:main@sha256:21a381d9ab1ff047565685044569c8536a55e489c9531326498b28d6b3cc244f"
23 | - "ghcr.io/labdao/equibind@sha256:ae2cec63b3924774727ed1c6c8af95cf4aaea2d3f0c5acbec56478505ccb2b07"
24 |
--------------------------------------------------------------------------------
/infrastructure/ansible/vars/teams.yaml:
--------------------------------------------------------------------------------
1 | # admins:
2 | # - josh
3 | # - aakaash
4 | # - humberto
5 | # teams:
6 | # - team: "labdao"
7 | # users:
8 | # - josh
9 | # - aakaash
10 | # - humberto
11 |
--------------------------------------------------------------------------------
/internal/ipfs/ipfs_test.go:
--------------------------------------------------------------------------------
1 | package ipfs
2 |
3 | import (
4 | "testing"
5 | )
6 |
7 | func TestPinDir(t *testing.T) {
8 | expectedCid := "QmWVKoVYBWHWdRLrL8Td5kUpqN2qH6zQ5piwtdCE1fjSYt"
9 | actualCid, err := PinDir("../../testdata/ipfs_test")
10 | if err != nil {
11 | t.Fatalf("error creating client: %v", err)
12 | }
13 | if expectedCid != actualCid {
14 | t.Errorf(`unmatching cids
15 | expected CID: %s
16 | actual CID: %s`, expectedCid, actualCid,
17 | )
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/internal/ipwl/testdata/example_equibind_io.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "tool": "tools/equibind.json",
4 | "inputs": {
5 | "protein": {
6 | "class": "File",
7 | "filepath": "*/testdata/binding/abl/7n9g.pdb"
8 | },
9 | "small_molecule": {
10 | "class": "File",
11 | "filepath": "*/testdata/binding/abl/ZINC000003986735.sdf"
12 | }
13 | },
14 | "outputs": {
15 | "best_docked_small_molecule": {
16 | "class": "File",
17 | "filepath": ""
18 | },
19 | "protein": {
20 | "class": "File",
21 | "filepath": ""
22 | }
23 | },
24 | "state": "created",
25 | "errMsg": ""
26 | },
27 | {
28 | "tool": "tools/equibind.json",
29 | "inputs": {
30 | "protein": {
31 | "class": "File",
32 | "filepath": "*/testdata/binding/abl/7n9g.pdb"
33 | },
34 | "small_molecule": {
35 | "class": "File",
36 | "filepath": "*/testdata/binding/abl/ZINC000019632618.sdf"
37 | }
38 | },
39 | "outputs": {
40 | "best_docked_small_molecule": {
41 | "class": "File",
42 | "filepath": ""
43 | },
44 | "protein": {
45 | "class": "File",
46 | "filepath": ""
47 | }
48 | },
49 | "state": "created",
50 | "errMsg": ""
51 | }
52 | ]
53 |
--------------------------------------------------------------------------------
/internal/ipwl/testdata/example_initial_io.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "tool": "tools/equibind.json",
4 | "inputs": {
5 | "protein": {
6 | "class": "File",
7 | "filepath": "/path/to/protein.pdb"
8 | },
9 | "small_molecule": {
10 | "class": "File",
11 | "filepath": "/path/to/small_molecule.sdf"
12 | }
13 | },
14 | "outputs": {
15 | "best_docked_small_molecule": {
16 | "class": "File",
17 | "filepath": ""
18 | },
19 | "protein": {
20 | "class": "File",
21 | "filepath": ""
22 | }
23 | },
24 | "state": "created",
25 | "errMsg": ""
26 | }
27 | ]
28 |
--------------------------------------------------------------------------------
/internal/ipwl/testdata/example_io.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "tool": {
4 | "name": "equibind",
5 | "ipfs": "QmZ2HarAgwZGjc3LBx9mWNwAQkPWiHMignqKup1ckp8NhB"
6 | },
7 | "inputs": {
8 | "protein": {
9 | "class": "File",
10 | "filepath": "/path/to/protein.pdb"
11 | },
12 | "small_molecule": {
13 | "class": "File",
14 | "filepath": "/path/to/small_molecule.sdf"
15 | }
16 | },
17 | "outputs": {
18 | "best_docked_small_molecule": {
19 | "class": "File",
20 | "filepath": "/path/to/best_docked_small_molecule.sdf"
21 | },
22 | "protein": {
23 | "class": "File",
24 | "filepath": "/path/to/protein.pdb"
25 | }
26 | },
27 | "state": "created",
28 | "errMsg": ""
29 | }
30 | ]
31 |
--------------------------------------------------------------------------------
/internal/web3/wallet.go:
--------------------------------------------------------------------------------
1 | package web3
2 |
3 | import (
4 | "regexp"
5 | )
6 |
7 | func IsValidEthereumAddress(address string) bool {
8 | pattern := `^0x[0-9a-fA-F]{40}$`
9 | match, _ := regexp.MatchString(pattern, address)
10 | return match
11 | }
12 |
--------------------------------------------------------------------------------
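IsValidEthereumAddress is a purely syntactic check: `0x` followed by exactly 40 hex digits, with no EIP-55 checksum verification. A quick illustration (the address values are arbitrary examples):

```go
package main

import (
	"fmt"

	"github.com/labdao/plex/internal/web3"
)

func main() {
	fmt.Println(web3.IsValidEthereumAddress("0x52908400098527886E0F7030069857D2E4169EE7")) // true
	fmt.Println(web3.IsValidEthereumAddress("0x123"))                                      // false: too short
	fmt.Println(web3.IsValidEthereumAddress("52908400098527886E0F7030069857D2E4169EE7"))   // false: missing 0x
}
```
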
/jobs/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/labdao/plex/e473e40765ef82ab83f05cd6047dd3bb5bc0765e/jobs/.gitkeep
--------------------------------------------------------------------------------
/main.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "github.com/labdao/plex/cmd"
5 | )
6 |
7 | func main() {
8 | cmd.Execute()
9 | }
10 |
--------------------------------------------------------------------------------
/models/colabdesign/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this model will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
6 |
7 | # Versions available on the platform:
8 | ## v1.2 - 2024-04-16
9 |
10 | - refactored code with convexity base image.
11 | - has checkpoints toggle.
12 | - max and default number of binders is 8 (v1.1 had 240); the number of binders is configurable up to this maximum
13 | - max running time set to 45 minutes
14 | 
--------------------------------------------------------------------------------
/models/colabdesign/conf/config.yaml:
--------------------------------------------------------------------------------
1 | defaults:
2 | - inputs: container
3 | - params: default
4 | - outputs: container
5 |
--------------------------------------------------------------------------------
/models/colabdesign/conf/inputs/container.yaml:
--------------------------------------------------------------------------------
1 | target_directory: /inputs
2 | target_pattern: target
--------------------------------------------------------------------------------
/models/colabdesign/conf/outputs/container.yaml:
--------------------------------------------------------------------------------
1 | directory: /outputs
--------------------------------------------------------------------------------
/models/colabdesign/conf/outputs/local.yaml:
--------------------------------------------------------------------------------
1 | directory: null
2 |
--------------------------------------------------------------------------------
/models/colabdesign/requirements.txt:
--------------------------------------------------------------------------------
1 | pyyaml==6.0.1
2 |
--------------------------------------------------------------------------------
/models/colabdesign/viz-requirements.txt:
--------------------------------------------------------------------------------
1 | Pillow==10.3.0
2 | molecularnodes==4.0.12
3 | pandas==2.2.2
4 | biopython==1.83
5 | pydantic==2.7.1
6 | selfies==2.1.1
--------------------------------------------------------------------------------
/models/labsay/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this model will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
6 |
7 | # Versions available on the platform:
8 | ## v1.4 - 2024-04-16
9 |
10 | - max running time set to 20 seconds; refactored labsay model with a checkpoint-compatible toggle
11 |
12 | # Older versions of the model (hidden/no longer available/archived)
13 | ## v1.0 - 2024-04-04
14 |
15 | - combined labsay model with the ability to toggle checkpoints on and off
16 |
17 | ## v0.4 - 2024-03-22
18 |
19 | - upload with random number for plddt and ipae
20 |
21 | ## v0.3 - 2024-03-21
22 |
23 | - upload to flowUUID/rayJobID/.... instead of just rayJobID/...
24 |
25 | ## v0.2 - 2024-03-16
26 |
27 | - Version and image update in the manifest. Otherwise same as v0.1
28 |
29 | ## v0.1 - 2024-03-15
30 |
31 | - Initial Release
--------------------------------------------------------------------------------
/models/labsay/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.9-slim
2 |
3 | WORKDIR /app
4 |
5 | COPY main.py /app
6 |
7 | RUN pip install --no-cache-dir boto3
8 |
9 | ENTRYPOINT ["python", "-u", "main.py"]
--------------------------------------------------------------------------------
/models/labsay/README.md:
--------------------------------------------------------------------------------
1 | # labsay-seq-only model
2 |
3 | Labsay is an internal model designed to merge the input functionality of the seq-only generator with the output of Labsay v0.8, in order to test the unified-view frontend implementation and speed up development iteration.
4 |
5 | ## Features
6 |
7 | - Accepts seq-only generator inputs.
8 | - Takes in labsay sample checkpoint pdb inputs.
9 | - Uploads simulated checkpoints to s3.
10 |
11 | ## Points to note
12 |
13 | - This model is checkpoint compatible, so the model manifest sets the `checkpointCompatible` flag to `true`.
14 | - To test functionality without checkpoints (i.e. with `checkpointCompatible` set to `false`), set the flag accordingly in the manifest.
15 | - Although the model combines labsay checkpoint inputs with seq-only generator inputs, it does not take the traditional `file_example` and `number_example` values at experiment submission (which labsay v0.8 did).
16 |
17 | ## Steps to test locally:
18 |
19 | ```bash
20 | cd models/labsay/
21 | chmod +x test.sh
22 | ./test.sh
23 | ```
24 | 
25 | Alternatively, see [CHANGELOG.md](./CHANGELOG.md) for steps to test the latest version of the model.
--------------------------------------------------------------------------------
/models/labsay/test.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | PLEX_JOB_INPUTS=$(cat user_input.json)
3 |
4 | docker build -t labsay .
5 |
6 | FLOW_UUID="test-flow_uuid_$(date +%y%m%d)"
7 | JOB_UUID="test-job_uuid_$(date +%y%m%d_%H%M%S)"
8 |
9 | OUTPUT_DIR="test-runs/outputs_$(date +%Y%m%d_%H%M%S)"
10 | mkdir -p "$PWD/$OUTPUT_DIR"
11 | echo "Output directory is $OUTPUT_DIR"
12 |
13 | docker run \
14 | -e PLEX_JOB_INPUTS="$PLEX_JOB_INPUTS" \
15 | -e FLOW_UUID="$FLOW_UUID" \
16 | -e JOB_UUID="$JOB_UUID" \
17 | -e CHECKPOINT_COMPATIBLE="False" \
18 | --env-file ~/aws.env \
19 | -v $PWD/testdata/inputs:/inputs/ \
20 | -v "$PWD/$OUTPUT_DIR":/outputs labsay
21 |
--------------------------------------------------------------------------------
/models/labsay/testdata/inputs/file_example/message.txt:
--------------------------------------------------------------------------------
1 | this is the example_file contents
2 | =^..^=
3 |
--------------------------------------------------------------------------------
/models/labsay/user_input.json:
--------------------------------------------------------------------------------
1 | {
2 | "binder_protein_sequence": "MKKAVINGEQIRSISDLHQTLKKELALPEYYGENLDALWDALTGWxxxxLVLEWRQFEQSKQLTENGAESVLQVFREAKAEGADITIILS",
3 | "target_protein_sequence": "MGSSHHHHHHSQAPIEGRAQVINTFDGVADYLQTYHKLPDNYITKSEAQALGWVASKGNLCDVAPGKSIGGDIFSNREGKLPGKSGRTWREADINYTCGFRNSDRILYSSDWLIYKTTDHYQTFTKIR",
4 | "number_of_binders": 3,
5 | "init_permissibility_vec": "",
6 | "hotspots": "",
7 | "high_fidelity": true,
8 | "pdb_checkpoint_0": "/inputs/pdb_checkpoints/example.pdb",
9 | "pdb_checkpoint_1": "/inputs/pdb_checkpoints/design_1.pdb",
10 | "pdb_checkpoint_2": "/inputs/pdb_checkpoints/BioCD202b18_aa_7fd4f_unrelaxed_rank_003_alphafold2_multimer_v3_model_2_seed_000.pdb",
11 | "speedup": true
12 | }
--------------------------------------------------------------------------------
/models/ray/simulate-protein-complex.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "relay-colabfold-v1.8",
3 | "description": "Simulate a protein complex using ColabFold",
4 | "task": "protein folding",
5 | "taskCategory": "protein-binder-design",
6 | "modelType": "ray",
7 | "checkpointCompatible": true,
8 | "xAxis": "plddt",
9 | "yAxis": "i_pae",
10 | "jobType": "service",
11 | "rayEndpoint": "/relay_simulate_protein_complex",
12 | "computeCost": 10,
13 | "inputs": {
14 | "binder_sequence": {
15 | "type": "string",
16 | "description": "The sequence of the binder protein",
17 | "required": true,
18 | "default": "PTVEAVTLLAKGEKEP",
19 | "example": "PTVEAVTLLAKGEKEP"
20 | },
21 | "target_sequence": {
22 | "type": "string",
23 | "description": "The sequence of the target protein",
24 | "required": true,
25 | "default": "ASISTGGGN",
26 | "example": "ASISTGGGN"
27 | }
28 | },
29 | "outputs": {
30 | "pdb": {
31 | "type": "File",
32 | "glob": ["*.pdb"]
33 | }
34 | }
35 | }
--------------------------------------------------------------------------------
/pkg/README.md:
--------------------------------------------------------------------------------
1 | Placeholder for future packages that can be imported by third parties.
--------------------------------------------------------------------------------
/receptor/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM golang:1.18 AS builder
2 |
3 | ADD https://truststore.pki.rds.amazonaws.com/global/global-bundle.pem /global-bundle.pem
4 |
5 | COPY . /app/
6 | RUN cd /app/ \
7 | && CGO_ENABLED=0 go build -o /go/bin/receptor
8 |
9 | FROM scratch
10 |
11 | COPY --from=builder /go/bin/receptor /receptor
12 | COPY --from=builder /global-bundle.pem /global-bundle.pem
13 |
14 | ENV PGHOST=postgres.example.com
15 | ENV PGUSER=receptor
16 | ENV PGDATABASE=receptor
17 | ENV PGSSLMODE=verify-ca
18 | ENV PGSSLROOTCERT=/global-bundle.pem
19 |
20 | EXPOSE 8080
21 |
22 | ENTRYPOINT ["/receptor"]
23 |
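24 | # Example local usage (a sketch; host and password values are placeholders):
25 | #   docker build -t receptor .
26 | #   docker run -e PGHOST=localhost -e PGPASSWORD=secret -p 8080:8080 receptor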
--------------------------------------------------------------------------------
/receptor/main.go:
--------------------------------------------------------------------------------
1 | package main
2 |
3 | import (
4 | "encoding/json"
5 | "log"
6 | "net/http"
7 |
8 | "github.com/gin-gonic/gin"
9 | "github.com/labdao/receptor/models"
10 | "gorm.io/gorm/clause"
11 | )
12 |
13 | func main() {
14 | log.Print("Connecting to database")
15 | models.ConnectDatabase()
16 |
17 | r := gin.Default()
18 |
19 | log.Print("Setting up routes")
20 | r.GET("/_health_check", health)
21 | r.POST("/judge", judge)
22 |
23 | log.Fatal(r.Run())
24 | }
25 |
26 | func health(c *gin.Context) {
27 | c.JSON(http.StatusOK, gin.H{"status": "ok"})
28 | }
29 |
30 | func judge(c *gin.Context) {
31 | var requestPayload models.JobModel
32 |
33 | if err := c.BindJSON(&requestPayload); err != nil {
34 | c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
35 | return
36 | }
37 |
38 | // Extract Job.ID from the JSON data
39 | var jobID struct{ ID string }
40 | if err := json.Unmarshal(requestPayload.Spec, &jobID); err != nil {
41 | c.JSON(http.StatusBadRequest, gin.H{"error": err.Error()})
42 | return
43 | }
44 |
45 | // Create a new JobModel instance
46 | jobModel := models.JobModel{
47 | NodeID: requestPayload.NodeID,
48 | Spec: requestPayload.Spec,
49 | JobID: jobID.ID,
50 | }
51 |
52 | // Create the record; OnConflict DoNothing skips rows whose job_id already exists
53 | if err := models.DB.Clauses(clause.OnConflict{DoNothing: true}).Create(&jobModel).Error; err != nil {
54 | c.JSON(http.StatusInternalServerError, gin.H{"error": err.Error()})
55 | return
56 | }
57 | 
58 | c.JSON(http.StatusOK, gin.H{})
59 | }
60 | 
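61 | // Example request against /judge (a sketch; the payload shape follows
62 | // models.JobModel, where the "Job" key binds to Spec and Job.ID is
63 | // extracted above; values are placeholders):
64 | //
65 | //	curl -X POST localhost:8080/judge \
66 | //	  -H 'Content-Type: application/json' \
67 | //	  -d '{"NodeID":"node-1","Job":{"ID":"job-123"}}'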
--------------------------------------------------------------------------------
/receptor/models/job.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import (
4 | "gorm.io/datatypes"
5 | "gorm.io/gorm"
6 | )
7 |
8 | type JobModel struct {
9 | gorm.Model
10 | NodeID string `json:"NodeID"`
11 | Spec datatypes.JSON `json:"Job" gorm:"column:spec"` // Store Job object in JSON format
12 | JobID string `gorm:"column:job_id;uniqueIndex"` // Extracted Job.ID field
13 | }
14 |
15 | type Tabler interface {
16 | TableName() string
17 | }
18 |
19 | // TableName overrides the table name
20 | func (JobModel) TableName() string {
21 | return "jobs"
22 | }
23 |
--------------------------------------------------------------------------------
/receptor/models/setup.go:
--------------------------------------------------------------------------------
1 | package models
2 |
3 | import (
4 | "fmt"
5 | "log"
6 | "os"
7 |
8 | "gorm.io/driver/postgres"
9 | "gorm.io/gorm"
10 | "gorm.io/gorm/logger"
11 | )
12 |
13 | var DB *gorm.DB
14 |
15 | func ConnectDatabase() {
16 |
17 | dsn := fmt.Sprintf(
18 | "host=%s user=%s password=%s dbname=%s",
19 | os.Getenv("PGHOST"),
20 | os.Getenv("PGUSER"),
21 | os.Getenv("PGPASSWORD"),
22 | os.Getenv("PGDATABASE"),
23 | )
24 | database, err := gorm.Open(postgres.Open(dsn), &gorm.Config{
25 | Logger: logger.Default.LogMode(logger.Silent),
26 | })
27 |
28 | if err != nil {
29 | log.Fatalf("Failed to connect to database: %v", err)
30 | }
31 |
32 | log.Print("Migrating database")
33 | err = database.AutoMigrate(&JobModel{})
34 | if err != nil {
35 | log.Fatalf("Failed to migrate database: %v", err)
36 | }
37 |
38 | DB = database
39 | }
40 |
--------------------------------------------------------------------------------
/scripts/asciicast/asciicast-commands.txt:
--------------------------------------------------------------------------------
1 | ./plex -tool equibind -input-dir testdata/binding/abl
2 | ./plex -tool equibind -input-dir testdata/binding/pdbind_processed_size1
3 | ./plex -tool equibind -input-dir testdata/binding/pdbind_processed_size2
4 | ./plex -tool colabfold-mini -input-dir testdata/folding -gpu=true -network=true
5 | ./plex -tool oddt -input-dir testdata/scoring/abl
--------------------------------------------------------------------------------
/scripts/asciicast/create-asciicast.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # run inside container image with asciinema installed
3 |
4 | # File containing the list of CLI commands to record
5 | commands_file="scripts/asciicast/asciicast-commands.txt"
6 |
7 | # Display the commands in the file
8 | while IFS= read -r cmd; do
9 | echo "Command: $cmd"
10 | done < "$commands_file"
11 |
12 | # Read commands from the file into an array
13 | commands=()
14 | while IFS= read -r cmd; do
15 | commands+=("$cmd")
16 | done < "$commands_file"
17 |
18 | # Loop through the commands array and create a recording for each one
19 | for cmd in "${commands[@]}"; do
20 | # Create a unique filename for each recording
21 | filename="$(echo "$cmd" | tr -d '[:space:]/' | tr -cd '[:alnum:]._-').cast"
22 |
23 | # Create a script to simulate typing the command character by character
24 | typed_cmd_script="tmp.sh"
25 | echo "#!/bin/bash" > "$typed_cmd_script"
26 | for ((i=0; i<${#cmd}; i++)); do
27 | echo "printf '%s' '${cmd:$i:1}'" >> "$typed_cmd_script"
28 | echo "sleep 0.1" >> "$typed_cmd_script"
29 | done
30 | echo "printf '\n'" >> "$typed_cmd_script"
31 | echo "$cmd" >> "$typed_cmd_script"
32 | echo "exit" >> "$typed_cmd_script"
33 | chmod +x "$typed_cmd_script"
34 |
35 | # Start the recording, execute the command, and then exit the shell
36 | asciinema rec -c "bash $typed_cmd_script" "scripts/asciicast/$filename" -y -i 2 --overwrite
37 |
38 | # Cleanup the temporary script
39 | rm -f "$typed_cmd_script"
40 | done
--------------------------------------------------------------------------------
/testdata/ipfs_test/haiku2.txt:
--------------------------------------------------------------------------------
1 | Science unbound flows
2 | Decentralized, it shines bright
3 | Knowledge spreads its wings.
4 |
--------------------------------------------------------------------------------
/testdata/ipfs_test/test.csv:
--------------------------------------------------------------------------------
1 | A,B,C
2 | 1,2,3
3 | 4,5,6
4 | testing,testing,testing
--------------------------------------------------------------------------------
/testdata/ipfs_test/testset_csv.csv:
--------------------------------------------------------------------------------
1 | Protein,Protein File,Ligand File
2 | 6d08,6d08/6d08_protein_processed.pdb,6d08/6d08_ligand.sdf
3 | 6jap,6jap/6jap_protein_processed.pdb,6jap/6jap_ligand.sdf
4 | 6hop,6hop/6hop_protein_processed.pdb,6hop/6hop_ligand.sdf
5 | 6qqq,6qqq/6qqq_protein_processed.pdb,6qqq/6qqq_ligand.mol2
6 | 6qqu,6qqu/6qqu_protein_processed.pdb,6qqu/6qqu_ligand.mol2
--------------------------------------------------------------------------------