├── .gitignore ├── CODE_OF_CONDUCT.md ├── LICENSE ├── README.md ├── SECURITY.md ├── SUPPORT.md ├── cookiecutter.json ├── docs └── onion-architecture-article.md └── {{cookiecutter.repo_name}} ├── Cargo.toml ├── diesel.toml ├── migrations ├── .keep ├── 00000000000000_diesel_initial_setup │ ├── down.sql │ └── up.sql └── 2022-11-25-082807_create_todo │ ├── down.sql │ └── up.sql ├── scripts └── run_postgres.sh └── src ├── api ├── controllers │ ├── mod.rs │ ├── service_context_handlers.rs │ └── todo_handler.rs ├── dto │ ├── mod.rs │ └── todo.rs ├── middleware.rs └── mod.rs ├── container.rs ├── create_app.rs ├── domain ├── constants.rs ├── error.rs ├── mod.rs ├── models │ ├── mod.rs │ ├── service_context.rs │ └── todo.rs ├── repositories │ ├── mod.rs │ ├── repository.rs │ └── todo.rs └── services │ ├── mod.rs │ ├── service_context.rs │ └── todo.rs ├── infrastructure ├── databases │ ├── mod.rs │ └── postgresql.rs ├── error.rs ├── mod.rs ├── models │ ├── mod.rs │ ├── service_context.rs │ └── todo.rs ├── repositories │ ├── mod.rs │ └── todo.rs ├── schema.rs └── services │ ├── mod.rs │ └── service_context.rs ├── lib.rs ├── main.rs ├── services ├── mod.rs └── todo.rs └── tests ├── api ├── mod.rs ├── test_service_context_controller.rs └── test_todo_controllers.rs └── mod.rs /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | debug/ 4 | target/ 5 | 6 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 7 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 8 | Cargo.lock 9 | 10 | # These are backup files generated by rustfmt 11 | **/*.rs.bk 12 | 13 | # MSVC Windows builds of rustc generate these, which store debugging information 14 | *.pdb 15 | 16 | .env 17 | .idea -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Microsoft Open Source Code of Conduct 2 | 3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/). 4 | 5 | Resources: 6 | 7 | - [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/) 8 | - [Microsoft Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/) 9 | - Contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with questions or concerns 10 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) Microsoft Corporation. 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Cookiecutter actix simple clean architecture
2 | This is a reusable Rust Cookiecutter template. The project is based on Actix web in combination with the Diesel ORM.
3 |
4 | Complete list of features the template provides:
5 | * Onion architecture
6 | * Actix Web
7 | * Maintenance window support
8 | * Diesel ORM
9 | * Database migrations
10 | * Local Postgres database Docker support
11 | * Testcontainers integration for testing
12 |
13 | ## Getting started
14 | To start a new project, run the following command:
15 | ```bash
16 | cookiecutter https://github.com/microsoft/cookiecutter-rust-actix-clean-architecture
17 | ```
18 | This will prompt you for some information about your project. The information
19 | you provide will be used to populate the files in the new project directory.
20 |
21 | You can then build the project locally.
22 | ```bash
23 | cargo build
24 | ```
25 |
26 | ## Architecture
27 | The application follows the Onion Architecture pattern. An article about our
28 | experience integrating an onion architecture with Actix web and the Diesel ORM can
29 | be found [here](./docs/onion-architecture-article.md).
30 |
31 | This architecture is a design pattern that organizes the codebase of a software application into multiple layers, where the innermost layer
32 | is the domain layer and the outermost layer is the application layer. Each layer depends only on the layers inside of it and not on the layers outside of it,
33 | creating a separation of concerns and allowing for a more maintainable and scalable codebase.
34 |
35 | For this template we suggest using a service-repository design pattern. For example implementations, have a look at the service repository design pattern section below.
36 |
37 |
38 | ## Running the application locally
39 | To run the application locally, you need to have a Postgres database running.
40 | You can use the `run_postgres.sh` script in the `scripts` directory to run a Postgres container.
41 | ```bash
42 | ./scripts/run_postgres.sh
43 | ```
44 |
45 | You can then run the application.
46 | ```bash
47 | cargo run
48 | ```
49 |
50 | ## Testing support
51 | All tests can be found under the `src/tests` folder. When using the template,
52 | you can place all your tests in this folder.
53 |
54 | To run the tests, you can use the following command:
55 | ```bash
56 | cargo test
57 | ```
58 | To run the tests with their output shown, you can run the following command:
59 | ```bash
60 | cargo test -- --nocapture
61 | ```
62 | or
63 | ```bash
64 | cargo test -- --show-output
65 | ```
66 |
67 | ## Diesel ORM
68 | The template uses the Diesel ORM for its database connection and database model
69 | integration. It is currently set up with Postgres; however, you can
70 | change it to any other database that is supported by Diesel. For other databases,
71 | have a look at the official Diesel documentation, which can be found [here](https://diesel.rs/).
72 |
73 | ### Database migrations
74 |
75 | 1) Make sure you have the Diesel CLI installed. You can install it with the following command:
76 | ```bash
77 | cargo install diesel_cli --no-default-features --features postgres
78 | ```
79 | 2) Add your Postgres database URL to the .env file:
80 | ```bash
81 | echo DATABASE_URL=postgres://username:password@localhost/diesel_demo > .env
82 | ```
83 | 3) Set up Diesel before creating a migration:
84 | ```bash
85 | diesel setup
86 | ```
87 | 4) Create a migration with the following command:
88 | ```bash
89 | diesel migration generate <migration_name>
90 | ```
91 | 5) Apply your migrations:
92 | ```bash
93 | diesel migration run
94 | ```
95 |
96 | ## Service repository design pattern
97 |
98 | ### Diesel Repositories
99 | The onion architecture works best with a repository-service pattern. An example
100 | repository can be seen below:
101 |
102 | ```rust
103 | // Can be placed under /src/domain/repositories/todo.rs
104 | #[derive(Debug, Serialize, Deserialize)]
105 | pub struct TodoQueryParams {
106 |     pub limit: Option<i64>,
107 |     pub offset: Option<i64>,
108 |     pub title: Option<String>,
109 | }
110 |
111 | impl QueryParams for TodoQueryParams {
112 |     fn limit(&self) -> i64 {
113 |         self.limit.or(DEFAULT_LIMIT).unwrap_or_default()
114 |     }
115 |     fn offset(&self) -> i64 {
116 |         self.offset.or(DEFAULT_OFFSET).unwrap_or_default()
117 |     }
118 | }
119 |
120 | #[async_trait]
121 | pub trait TodoRepository: Send + Sync {
122 |     async fn create(&self, new_todo: &CreateTodo) -> RepositoryResult<Todo>;
123 |     async fn list(&self, params: TodoQueryParams) -> RepositoryResult<ResultPaging<Todo>>;
124 |     async fn get(&self, todo_id: i32) -> RepositoryResult<Todo>;
125 |     async fn delete(&self, todo_id: i32) -> RepositoryResult<()>;
126 | }
127 | ```
128 |
129 | ```rust
130 | // Can be placed under /src/infrastructure/repositories/todo.rs
131 | pub struct TodoDieselRepository {
132 |     pub pool: Arc<DBConn>
133 | }
134 |
135 | impl TodoDieselRepository {
136 |     pub fn new(db: Arc<DBConn>) -> Self {
137 |         TodoDieselRepository { pool: db }
138 |     }
139 | }
140 |
141 | #[async_trait]
142 | impl TodoRepository for TodoDieselRepository {
143 |
144 |     async fn create(&self, new_todo: &CreateTodo) -> RepositoryResult<Todo> {
145 |         use crate::infrastructure::schema::todos::dsl::todos;
146 |         let new_todo_diesel: CreateTodoDiesel = CreateTodoDiesel::from(new_todo.clone());
147 |         let mut conn = self.pool.get().unwrap();
148 |         let result: TodoDiesel = run(move || diesel::insert_into(todos).values(new_todo_diesel)
149 |             .get_result(&mut conn))
150 |             .await
151 |             .map_err(|v| DieselRepositoryError::from(v).into_inner())?;
152 |         Ok(result.into())
153 |     }
154 |
155 |     async fn list(&self, params: TodoQueryParams) -> RepositoryResult<ResultPaging<Todo>> {
156 |         use crate::infrastructure::schema::todos::dsl::todos;
157 |         let pool = self.pool.clone();
158 |         let builder = todos.limit(params.limit()).offset(params.offset());
159 |         let result = run(move || {
160 |             let mut conn = pool.get().unwrap();
161 |             builder.load::<TodoDiesel>(&mut conn)
162 |         })
163 |         .await
164 |         .map_err(|v| DieselRepositoryError::from(v).into_inner())?;
165 |         Ok(ResultPaging {
166 |             total: 0,
167 |             items: result.into_iter().map(|v| v.into()).collect()
168 |         })
169 |     }
170 |
171 |     async fn get(&self, todo_id: i32) -> RepositoryResult<Todo> {
172 |         use crate::infrastructure::schema::todos::dsl::{id, todos};
173 |         let mut conn = self.pool.get().unwrap();
174 |         run(move || todos.filter(id.eq(todo_id)).first::<TodoDiesel>(&mut conn))
175 | .await 176 | .map_err(|v| DieselRepositoryError::from(v).into_inner()) 177 | .map(|v| -> Todo { v.into() }) 178 | } 179 | 180 | async fn delete(&self, todo_id: i32) -> RepositoryResult<()> { 181 | use crate::infrastructure::schema::todos::dsl::{id, todos}; 182 | let mut conn = self.pool.get().unwrap(); 183 | run(move || diesel::delete(todos).filter(id.eq(todo_id)) 184 | .execute(&mut conn)) 185 | .await 186 | .map_err(|v| DieselRepositoryError::from(v).into_inner())?; 187 | Ok(()) 188 | } 189 | } 190 | ``` 191 | 192 | ### Services 193 | The onion architecture is best being used with a repository-service pattern. An example 194 | service can be seen below: 195 | ```rust 196 | // Can be placed under /src/services/todo.rs 197 | #[derive(Clone)] 198 | pub struct TodoServiceImpl { 199 | pub repository: Arc, 200 | } 201 | 202 | impl TodoServiceImpl { 203 | pub fn new(repository: Arc) -> Self { 204 | TodoServiceImpl { 205 | repository, 206 | } 207 | } 208 | } 209 | 210 | #[async_trait] 211 | impl TodoService for TodoServiceImpl { 212 | async fn create(&self, todo: CreateTodo) -> Result { 213 | let mut cloned = todo.clone(); 214 | self.repository 215 | .create(&mut cloned) 216 | .await 217 | .map_err(|e| -> CommonError { e.into() }) 218 | } 219 | 220 | async fn list(&self, params: TodoQueryParams) -> Result, CommonError> { 221 | self.repository 222 | .list(params) 223 | .await 224 | .map_err(|e| -> CommonError { e.into() }) 225 | } 226 | 227 | async fn get(&self, todo_id: i32) -> Result { 228 | self.repository 229 | .get(todo_id) 230 | .await 231 | .map_err(|e| -> CommonError { e.into() }) 232 | } 233 | 234 | async fn delete(&self, todo_id: i32) -> Result<(), CommonError> { 235 | self.repository 236 | .delete(todo_id) 237 | .await 238 | .map_err(|e| -> CommonError { e.into() }) 239 | } 240 | } 241 | ``` 242 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | ## Security 4 | 5 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/). 6 | 7 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below. 8 | 9 | ## Reporting Security Issues 10 | 11 | **Please do not report security vulnerabilities through public GitHub issues.** 12 | 13 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://aka.ms/opensource/security/create-report). 14 | 15 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://aka.ms/opensource/security/pgpkey). 16 | 17 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. 
Additional information can be found at [microsoft.com/msrc](https://aka.ms/opensource/security/msrc). 18 | 19 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue: 20 | 21 | * Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.) 22 | * Full paths of source file(s) related to the manifestation of the issue 23 | * The location of the affected source code (tag/branch/commit or direct URL) 24 | * Any special configuration required to reproduce the issue 25 | * Step-by-step instructions to reproduce the issue 26 | * Proof-of-concept or exploit code (if possible) 27 | * Impact of the issue, including how an attacker might exploit the issue 28 | 29 | This information will help us triage your report more quickly. 30 | 31 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://aka.ms/opensource/security/bounty) page for more details about our active programs. 32 | 33 | ## Preferred Languages 34 | 35 | We prefer all communications to be in English. 36 | 37 | ## Policy 38 | 39 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://aka.ms/opensource/security/cvd). 40 | 41 | 42 | -------------------------------------------------------------------------------- /SUPPORT.md: -------------------------------------------------------------------------------- 1 | # TODO: The maintainer of this repo has not yet edited this file 2 | 3 | **REPO OWNER**: Do you want Customer Service & Support (CSS) support for this product/project? 4 | 5 | - **No CSS support:** Fill out this template with information about how to file issues and get help. 6 | - **Yes CSS support:** Fill out an intake form at [aka.ms/onboardsupport](https://aka.ms/onboardsupport). CSS will work with/help you to determine next steps. 7 | - **Not sure?** Fill out an intake as though the answer were "Yes". CSS will help you decide. 8 | 9 | *Then remove this first heading from this SUPPORT.MD file before publishing your repo.* 10 | 11 | # Support 12 | 13 | ## How to file issues and get help 14 | 15 | This project uses GitHub Issues to track bugs and feature requests. Please search the existing 16 | issues before filing new issues to avoid duplicates. For new issues, file your bug or 17 | feature request as a new Issue. 18 | 19 | For help and questions about using this project, please **REPO MAINTAINER: INSERT INSTRUCTIONS HERE 20 | FOR HOW TO ENGAGE REPO OWNERS OR COMMUNITY FOR HELP. COULD BE A STACK OVERFLOW TAG OR OTHER 21 | CHANNEL. WHERE WILL YOU HELP PEOPLE?**. 22 | 23 | ## Microsoft Support Policy 24 | 25 | Support for this **PROJECT or PRODUCT** is limited to the resources listed above. 
26 |
--------------------------------------------------------------------------------
/cookiecutter.json:
--------------------------------------------------------------------------------
1 | {
2 |     "repo_name": "Your Project Name"
3 | }
--------------------------------------------------------------------------------
/docs/onion-architecture-article.md:
--------------------------------------------------------------------------------
1 | # Rust onion architecture with actix and diesel
2 | > All the source code of this article can be found [here](https://github.com/coding-kitties/cookiecutter-actix-simple-clean-architecture) and [here](https://github.com/microsoft/cookiecutter-rust-actix-clean-architecture)
3 |
4 | This article describes an onion architecture implementation with Rust using
5 | actix and diesel.
6 |
7 | Onion Architecture is a software design pattern that organizes the codebase into
8 | multiple layers. Each layer depends only on the layers inside of it and not on the layers outside of it.
9 | This creates a separation of concerns, which allows for a more maintainable and scalable codebase.
10 |
11 | Actix web is a popular and widely used web framework for Rust that provides a high-performance
12 | and scalable foundation for building web applications. It is built on top of the
13 | Actix actor framework, which allows for a clear separation of concerns and
14 | modularity, making it a good fit for implementing the application layer of the onion architecture.
15 |
16 | ## Why onion architecture for a Rust-based web application?
17 | Onion architectures are commonly used by software engineering teams and across a wide range of programming languages.
18 | It is therefore interesting to see how Rust functions in such an architecture.
19 | Given that Rust is a relatively new language compared to languages such as C#, Java, and C++, it is interesting to see how Rust can
20 | adopt a standard such as the onion architecture. If Rust is suitable for such an architecture, it can be a decision driver for
21 | new applications to adopt Rust.
22 |
23 | Rust is known for its speed. However, even if Rust is fast compared to other languages, production-ready web applications
24 | typically use technologies such as databases, caching, or ORMs. These systems can slow an application down. Taking into account
25 | that a standard production-ready Rust web application would use these systems, we can still see that
26 | Rust outperforms other languages by a wide margin.
27 |
28 | ## Architecture Overview
29 | The onion architecture is a layered architecture that is based on the onion model,
30 | where each layer in the onion model is used to define a different layer of the application.
31 |
32 | For this Rust implementation, four layers are used:
33 | * api (app) module: The outermost layer that contains the controllers and the endpoint definitions, serialization and deserialization of the data, validation, and error handling.
34 | * infrastructure: Layer that typically includes database connections, external API calls, logging, and configuration management.
35 | * services: Layer that contains the application's services, which encapsulate the core business logic and provide a higher-level abstraction for the application to interact with the domain entities.
36 | * domain: The innermost layer that contains the core business logic and entities of the application.
37 |
38 |
39 | Folder structure:
40 | ```
41 | .
42 | ├── migrations
43 | ├── scripts
44 | │   └── run_postgres.sh   # Run postgres in docker locally
45 | ├── src
46 | │   ├── api
47 | │   │   ├── controllers
48 | │   │   │   └── ...       # controllers for the api
49 | │   │   ├── dto           # Data transfer objects
50 | │   │   │   └── ...       # Individual DTOs
51 | │   │   └── middleware.rs # actix middleware (e.g. maintenance mode check)
52 | │   ├── infrastructure
53 | │   │   ├── services
54 | │   │   │   └── ...       # Services that use third party libraries or services (e.g. email service)
55 | │   │   ├── databases
56 | │   │   │   └── ...       # Database adapters and initialization
57 | │   │   ├── repositories
58 | │   │   │   └── ...       # Repositories for interacting with the databases
59 | │   │   └── models
60 | │   │       └── ...       # Database models
61 | │   ├── domain
62 | │   │   ├── mod.rs
63 | │   │   ├── constants.rs
64 | │   │   ├── error.rs
65 | │   │   ├── models
66 | │   │   │   └── ...       # Business logic model traits or structs
67 | │   │   ├── services
68 | │   │   │   └── ...       # Service traits
69 | │   │   └── repositories
70 | │   │       └── ...       # Repository traits
71 | │   ├── services
72 | │   │   └── ...           # Concrete service implementations for interacting with the domain (business logic)
73 | │   ├── container.rs
74 | │   ├── create_app.rs     # app factory
75 | │   ├── lib.rs
76 | │   └── main.rs
77 | ```
78 |
79 | * migrations: Diesel's migration scripts are stored here.
80 | * scripts: contains helper scripts, such as `run_postgres.sh` for running a local Postgres container.
81 |
82 | ## Is Rust suitable for onion architecture?
83 | The clear separation of concerns and modularity that the onion architecture
84 | provides aligns well with Rust's design philosophy. Rust's support for
85 | creating libraries and its strict type system allow for building well-defined,
86 | decoupled and easily testable components, making it a good choice for
87 | implementing the domain, services, and infrastructure layers of the
88 | onion architecture. Additionally, the Rust ecosystem has a growing number
89 | of libraries and frameworks that support various use cases, such as database
90 | access, web development, and more. Actix in particular can easily integrate
91 | with other Rust libraries and frameworks, such as Diesel for database access,
92 | making it possible to build a full-stack application using the onion architecture with Rust.
93 |
94 | Most of the problems you will face concern sharing your services
95 | and repositories between the different layers. There is currently no
96 | standardized or commonly used dependency injection framework for actix.
97 |
98 |
99 | ## Diesel adapter setup in the infrastructure layer
100 | ["The database is not the center. It's external"][onion-architecture]
101 |
102 | Diesel is a popular and widely used Object Relational Mapping (ORM)
103 | library for Rust, and can be used in the infrastructure layer of an
104 | onion architecture. To set up Diesel in the infrastructure layer, the first
105 | step is to add Diesel as a dependency to the project's Cargo.toml file. Then,
106 | you need to define the schema for the database tables using Diesel's schema
107 | macro, which will generate the necessary Rust code to interact with the
108 | database. After that, you can create a database connection and set up the
109 | connection pool using Diesel's r2d2 connection pooling support. The connection pool
110 | can be used to manage multiple database connections, allowing for efficient
111 | reuse and sharing of database connections. To perform database operations,
112 | you can use Diesel's query builder to construct and execute database queries. A condensed sketch of the pool setup is shown below.
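The following is a minimal sketch of that pool setup, mirroring the template's `src/infrastructure/databases/postgresql.rs` (the generic type parameters are reconstructed here for readability):

```rust
use std::env;

use diesel::pg::PgConnection;
use diesel::r2d2::{self, ConnectionManager};
use dotenv::dotenv;

// A pool of r2d2-managed Postgres connections, shared across the application.
pub type Pool<T> = r2d2::Pool<ConnectionManager<T>>;
pub type DBConn = Pool<PgConnection>;

pub fn db_pool() -> DBConn {
    dotenv().ok();
    // DATABASE_URL is written to .env by scripts/run_postgres.sh
    let database_url = env::var("DATABASE_URL").expect("DATABASE_URL must be set");
    let manager = ConnectionManager::<PgConnection>::new(database_url);
    Pool::builder()
        .build(manager)
        .expect("Failed to create pool")
}
```

The pool is created once in the container and handed to repositories as an `Arc<DBConn>`, so all layers share the same set of connections.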
113 | When used in the infrastructure layer of an onion architecture, Diesel provides
114 | a clean and efficient way to interact with a database, abstracting the
115 | underlying database technology and allowing the services layer to interact
116 | with the database through a well-defined interface.
117 |
118 | Diesel currently does not support async behaviour. There is a project by the
119 | creators of Diesel that aims to address this issue: [async diesel]()
120 |
121 | However, to mitigate this issue, we leverage actix to run our database ORM operations on
122 | a separate thread pool. We do this by wrapping the blocking Diesel calls in `actix_threadpool::run`,
123 | as can be seen in the repository implementation under `src/infrastructure/repositories/todo.rs`.
124 |
125 |
126 |
127 | ## Actix architecture adaptation
128 | All dependencies are initialized in a factory method (see `src/container.rs` and the `create_app` factory in `src/create_app.rs`).
129 |
130 | Actix web was originally based on Actix, an actor framework.
131 | Actors are objects which encapsulate state and behavior; they communicate exclusively by exchanging messages. Actix actors are implemented on top of Tokio. Multiple actors can run in the same thread.
132 |
133 | Actix web still leans heavily on the actor model; therefore, you will encounter some concepts that are related to it.
134 | For example, `actix_web::web::Data` is shared state between all the actors.
135 | In other languages, you would use dependency injection to inject the dependencies into the actors. In Rust, you use `actix_web::web::Data` to share the state between the actors.
136 |
137 | All services are shared through the app state.
138 |
139 | Actix web can be adapted to fit within the onion architecture by using it to
140 | implement the application layer. In this architecture, the application layer
141 | is responsible for handling the input and output of the application and
142 | coordinating the use of services. Actix web provides a simple and intuitive
143 | API for defining endpoints, handling HTTP requests, and returning responses,
144 | making it a suitable choice for implementing the application layer. It can use
145 | the services' public APIs to process requests and return responses, and can
146 | integrate with the infrastructure layer, such as using Diesel for database
147 | access, to perform database operations as needed. By adapting Actix web to
148 | fit within the onion architecture, it becomes a flexible and scalable tool
149 | for building web applications with clear separation of concerns and modularity.
150 | This makes it easier to maintain and evolve the application over time, as well
151 | as making it easier to test and debug individual components. Additionally, the
152 | strict modularity of the onion architecture makes it easier to swap out
153 | components with different implementations as needed.
154 |
155 |
156 | ## What are common pitfalls with a Rust onion architecture?
157 | While Actix web and the onion architecture can be a powerful combination
158 | for building web applications in Rust, there are some common pitfalls to
159 | watch out for. One pitfall is overloading the application layer with too
160 | much logic, as this can result in a tightly-coupled and difficult-to-maintain
161 | application. It is important to maintain a clear separation of concerns
162 | and only perform necessary logic in the application layer, delegating other
163 | operations to the services layer. A sketch of such a thin handler is shown below.
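As an illustration, here is a condensed sketch based on the template's `src/api/controllers/todo_handler.rs` (imports trimmed and generic parameters reconstructed): the handler only converts DTOs and delegates to the domain-level `TodoService` trait, keeping the application layer thin.

```rust
use actix_web::web;

use crate::api::dto::todo::{CreateTodoDTO, TodoDTO};
use crate::domain::error::ApiError;
use crate::domain::services::todo::TodoService;

// The handler stays thin: deserialize the request DTO, delegate to the service
// trait, and convert the returned domain model back into a response DTO.
pub async fn create_todo_handler(
    todo_service: web::Data<dyn TodoService>,
    post_data: web::Json<CreateTodoDTO>,
) -> Result<web::Json<TodoDTO>, ApiError> {
    let todo = todo_service.create(post_data.into_inner().into()).await?;
    Ok(web::Json(todo.into()))
}
```

All business rules stay behind the `TodoService` trait, so the handler can be tested and replaced without touching the domain or infrastructure layers.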
164 | 165 | Another pitfall is not properly defining the APIs between the different layers, 166 | which can lead to tight coupling and make it difficult to modify or replace components in the future. 167 | It is important to carefully design and document the APIs between the different 168 | layers to ensure that they are flexible and maintainable over time. 169 | 170 | One example of this principle is the repository trait that we defined in our 171 | domain layer. Services that will use a repository for the logic will depend on 172 | the trait. Services will be unaware of the concrete implementation that they will use such as a diesel 173 | based repository. 174 | 175 | Additionally, not properly handling errors and exceptions can lead to 176 | unexpected behavior and crashes in the application. It is important to 177 | implement proper error handling and logging to ensure that the application 178 | is robust and can recover gracefully from unexpected events. Overall, careful 179 | design and implementation are key to avoiding these common pitfalls and 180 | building a successful Actix web and onion architecture based web application 181 | in Rust. 182 | 183 | ## Conclusion 184 | 185 | 186 | [clean-architecture]: https://blog.cleancoder.com/uncle-bob/2012/08/13/the-clean-architecture.html 187 | [hexagonal-architecture]: https://en.wikipedia.org/wiki/Hexagonal_architecture_(software) 188 | [onion-architecture]: https://jeffreypalermo.com/2008/07/the-onion-architecture-part-1/ 189 | [rust]: https://www.rust-lang.org/ -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "actix-clean-architecture" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | actix-web = "4" 10 | actix-threadpool = "0.3.3" 11 | serde = { version = "1.0", features = ["derive"] } 12 | diesel = { version = "2.0.0", features = ["postgres", "r2d2", "chrono", "serde_json"] } 13 | diesel_migrations = "2.0.0" 14 | async-trait = "0.1.58" 15 | dotenv = { version = "0.15" } 16 | testcontainers = { version = "0.14.0" } 17 | env_logger = "0.10.0" 18 | log = "0.4" 19 | serde_json = "1.0" 20 | futures-util = "0.3.26" 21 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/diesel.toml: -------------------------------------------------------------------------------- 1 | # For documentation on how to configure this file, 2 | # see https://diesel.rs/guides/configuring-diesel-cli 3 | 4 | [print_schema] 5 | file = "src/infrastructure/schema.rs" 6 | 7 | [migrations_directory] 8 | dir = "migrations" 9 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/migrations/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/microsoft/cookiecutter-rust-actix-clean-architecture/9cedb0aa7b3a5b8527bd5f7caa45d2ed65b2addd/{{cookiecutter.repo_name}}/migrations/.keep -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/migrations/00000000000000_diesel_initial_setup/down.sql: -------------------------------------------------------------------------------- 1 | -- This file was automatically created by Diesel to setup helper functions 
2 | -- and other internal bookkeeping. This file is safe to edit, any future 3 | -- changes will be added to existing projects as new migrations. 4 | 5 | DROP FUNCTION IF EXISTS diesel_manage_updated_at(_tbl regclass); 6 | DROP FUNCTION IF EXISTS diesel_set_updated_at(); 7 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/migrations/00000000000000_diesel_initial_setup/up.sql: -------------------------------------------------------------------------------- 1 | -- This file was automatically created by Diesel to setup helper functions 2 | -- and other internal bookkeeping. This file is safe to edit, any future 3 | -- changes will be added to existing projects as new migrations. 4 | 5 | 6 | 7 | 8 | -- Sets up a trigger for the given table to automatically set a column called 9 | -- `updated_at` whenever the row is modified (unless `updated_at` was included 10 | -- in the modified columns) 11 | -- 12 | -- # Example 13 | -- 14 | -- ```sql 15 | -- CREATE TABLE users (id SERIAL PRIMARY KEY, updated_at TIMESTAMP NOT NULL DEFAULT NOW()); 16 | -- 17 | -- SELECT diesel_manage_updated_at('users'); 18 | -- ``` 19 | CREATE OR REPLACE FUNCTION diesel_manage_updated_at(_tbl regclass) RETURNS VOID AS $$ 20 | BEGIN 21 | EXECUTE format('CREATE TRIGGER set_updated_at BEFORE UPDATE ON %s 22 | FOR EACH ROW EXECUTE PROCEDURE diesel_set_updated_at()', _tbl); 23 | END; 24 | $$ LANGUAGE plpgsql; 25 | 26 | CREATE OR REPLACE FUNCTION diesel_set_updated_at() RETURNS trigger AS $$ 27 | BEGIN 28 | IF ( 29 | NEW IS DISTINCT FROM OLD AND 30 | NEW.updated_at IS NOT DISTINCT FROM OLD.updated_at 31 | ) THEN 32 | NEW.updated_at := current_timestamp; 33 | END IF; 34 | RETURN NEW; 35 | END; 36 | $$ LANGUAGE plpgsql; 37 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/migrations/2022-11-25-082807_create_todo/down.sql: -------------------------------------------------------------------------------- 1 | DROP TABLE todos; 2 | DROP TABLE service_contexts; 3 | -- This file should undo anything in `up.sql` -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/migrations/2022-11-25-082807_create_todo/up.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE todos ( 2 | id SERIAL PRIMARY KEY, 3 | title VARCHAR NOT NULL, 4 | description TEXT NOT NULL, 5 | completed BOOLEAN NOT NULL DEFAULT FALSE 6 | ); 7 | 8 | CREATE TABLE service_contexts ( 9 | id SERIAL PRIMARY KEY, 10 | maintenance BOOLEAN NOT NULL DEFAULT FALSE 11 | ); 12 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/scripts/run_postgres.sh: -------------------------------------------------------------------------------- 1 | docker stop diesel_postgres 2 | docker rm diesel_postgres 3 | docker pull postgres:14 4 | docker run --rm -P -p 127.0.0.1:5432:5432 -e POSTGRES_PASSWORD="1234" -d --name diesel_postgres postgres:14 5 | echo DATABASE_URL=postgresql://postgres:1234@127.0.0.1:5432/postgres > .env 6 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/api/controllers/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod todo_handler; 2 | -------------------------------------------------------------------------------- 
/{{cookiecutter.repo_name}}/src/api/controllers/service_context_handlers.rs: -------------------------------------------------------------------------------- 1 | // 2 | // 3 | // 4 | // pub async fn update_service_context_handler( 5 | // service_context_service: web::Data, post_data: web::Json, 6 | // ) -> Result, ApiError> { 7 | // let service_context = service_context_service.update(post_data.into_inner().into()).await?; 8 | // Ok(web::Json(service_context.into())) 9 | // } 10 | // 11 | // pub async fn get_service_context( 12 | // service_context_service: web::Data, 13 | // ) -> Result, ApiError> { 14 | // let service_context = service_context_service.get_service_context().await?; 15 | // Ok(web::Json(service_context.into())) 16 | // } -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/api/controllers/todo_handler.rs: -------------------------------------------------------------------------------- 1 | use actix_web::{web, Result, HttpResponse}; 2 | use crate::api::dto::todo::{CreateTodoDTO, TodoDTO}; 3 | use crate::domain::error::{ApiError}; 4 | use crate::domain::repositories::repository::ResultPaging; 5 | use crate::domain::repositories::todo::TodoQueryParams; 6 | use crate::domain::services::todo::TodoService; 7 | 8 | pub async fn create_todo_handler( 9 | todo_service: web::Data, post_data: web::Json, 10 | ) -> Result, ApiError> { 11 | let todo = todo_service.create(post_data.into_inner().into()).await?; 12 | Ok(web::Json(todo.into())) 13 | } 14 | 15 | pub async fn list_todos_handler( 16 | todo_service: web::Data, params: web::Query, 17 | ) -> Result>, ApiError> { 18 | let selection = todo_service.list(params.into_inner()).await?; 19 | Ok(web::Json(selection.into())) 20 | } 21 | 22 | pub async fn get_todo_handler( 23 | todo_service: web::Data, params: web::Path, 24 | ) -> Result, ApiError> { 25 | let todo = todo_service.get(params.into_inner()).await?; 26 | Ok(web::Json(todo.into())) 27 | } 28 | 29 | pub async fn delete_todo_handler( 30 | todo_service: web::Data, params: web::Path, 31 | ) -> Result { 32 | todo_service.delete(params.into_inner()).await?; 33 | Ok(HttpResponse::NoContent().finish()) 34 | } 35 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/api/dto/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod todo; -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/api/dto/todo.rs: -------------------------------------------------------------------------------- 1 | use crate::domain::models::todo::{CreateTodo, Todo}; 2 | use serde::{Serialize, Deserialize}; 3 | use crate::domain::repositories::repository::ResultPaging; 4 | 5 | #[derive(Deserialize, Serialize)] 6 | pub struct CreateTodoDTO { 7 | pub title: String, 8 | pub description: String, 9 | } 10 | 11 | #[derive(Debug, Serialize)] 12 | pub struct TodoDTO { 13 | id: i32, 14 | title: String, 15 | description: String, 16 | completed: bool, 17 | } 18 | 19 | impl Into for Todo { 20 | fn into(self) -> TodoDTO { 21 | TodoDTO { 22 | id: self.id, 23 | title: self.title, 24 | description: self.description, 25 | completed: false 26 | } 27 | } 28 | } 29 | 30 | impl Into for CreateTodoDTO { 31 | fn into(self) -> CreateTodo { 32 | CreateTodo { 33 | title: self.title, 34 | description: self.description, 35 | } 36 | } 37 | } 38 | 39 | impl Into for CreateTodo { 40 | fn into(self) -> CreateTodoDTO { 41 | 
CreateTodoDTO { 42 | title: self.title, 43 | description: self.description, 44 | } 45 | } 46 | } 47 | 48 | impl Into> for ResultPaging { 49 | fn into(self) -> ResultPaging { 50 | ResultPaging { 51 | total: self.total, 52 | items: self.items.into_iter().map(|todo| todo.into()).collect(), 53 | } 54 | } 55 | } -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/api/middleware.rs: -------------------------------------------------------------------------------- 1 | use std::future::{ready, Ready}; 2 | 3 | use actix_web::{body::EitherBody, dev::{self, Service, ServiceRequest, ServiceResponse, Transform}, Error, HttpResponse, web}; 4 | use futures_util::future::LocalBoxFuture; 5 | use log::info; 6 | use crate::domain::services::service_context::ServiceContextService; 7 | 8 | pub struct ServiceContextMaintenanceCheck; 9 | 10 | impl Transform for ServiceContextMaintenanceCheck 11 | where 12 | S: Service, Error = Error>, 13 | S::Future: 'static, 14 | B: 'static, 15 | { 16 | type Response = ServiceResponse>; 17 | type Error = Error; 18 | type Transform = ServiceContextMaintenanceCheckMiddleware; 19 | type InitError = (); 20 | type Future = Ready>; 21 | 22 | fn new_transform(&self, service: S) -> Self::Future { 23 | ready(Ok(ServiceContextMaintenanceCheckMiddleware { service })) 24 | } 25 | } 26 | pub struct ServiceContextMaintenanceCheckMiddleware { 27 | service: S, 28 | } 29 | 30 | impl Service for ServiceContextMaintenanceCheckMiddleware 31 | where 32 | S: Service, Error = Error>, 33 | S::Future: 'static, 34 | B: 'static, 35 | { 36 | type Response = ServiceResponse>; 37 | type Error = Error; 38 | type Future = LocalBoxFuture<'static, Result>; 39 | 40 | dev::forward_ready!(service); 41 | 42 | fn call(&self, request: ServiceRequest) -> Self::Future { 43 | let service_context_service = 44 | request.app_data::>().unwrap(); 45 | 46 | if service_context_service.is_maintenance_active() { 47 | info!("Service is in maintenance mode"); 48 | let (request, _pl) = request.into_parts(); 49 | let response = HttpResponse::ServiceUnavailable().finish().map_into_right_body(); 50 | return Box::pin(async { Ok(ServiceResponse::new(request, response)) }); 51 | } 52 | 53 | let res = self.service.call(request); 54 | Box::pin(async move { 55 | // forwarded responses map to "left" body 56 | res.await.map(ServiceResponse::map_into_left_body) 57 | }) 58 | } 59 | } -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/api/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod controllers; 2 | pub mod dto; 3 | pub mod middleware; -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/container.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use crate::domain::repositories::todo::TodoRepository; 3 | use crate::domain::services::service_context::ServiceContextService; 4 | use crate::domain::services::todo::TodoService; 5 | use crate::infrastructure::databases::postgresql::db_pool; 6 | use crate::infrastructure::repositories::todo::TodoDieselRepository; 7 | use crate::infrastructure::services::service_context::ServiceContextServiceImpl; 8 | use crate::services::todo::TodoServiceImpl; 9 | 10 | pub struct Container { 11 | pub todo_service: Arc, 12 | pub service_context_service: Arc 13 | } 14 | 15 | impl Container { 16 | pub fn new() -> Self { 17 | 
let pool = Arc::new(db_pool()); 18 | let todo_repository: Arc = Arc::new( 19 | TodoDieselRepository::new(pool.clone()) 20 | ); 21 | let todo_service = Arc::new( 22 | TodoServiceImpl { repository: todo_repository } 23 | ); 24 | let service_context_service = Arc::new( 25 | ServiceContextServiceImpl::new(pool.clone()) 26 | ); 27 | Container { todo_service, service_context_service } 28 | } 29 | } 30 | 31 | impl Default for Container { 32 | fn default() -> Self { 33 | Self::new() 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/create_app.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use actix_web::{App, web}; 3 | use actix_web::{Error}; 4 | use actix_web::body::MessageBody; 5 | use actix_web::dev::{ServiceFactory, ServiceRequest, ServiceResponse}; 6 | use actix_web::middleware::Logger; 7 | use crate::api::controllers::todo_handler::{create_todo_handler, delete_todo_handler, get_todo_handler, list_todos_handler}; 8 | use crate::api::middleware::{ServiceContextMaintenanceCheck}; 9 | use crate::container::Container; 10 | 11 | pub fn create_app(container: Arc) -> App< 12 | impl ServiceFactory< 13 | ServiceRequest, 14 | Response = ServiceResponse, 15 | Config = (), 16 | InitError = (), 17 | Error = Error, 18 | >, 19 | > { 20 | let todo_service = container.todo_service.clone(); 21 | let service_context_service = container.service_context_service.clone(); 22 | 23 | App::new() 24 | .app_data(web::Data::from(todo_service.clone())) 25 | .app_data(web::Data::from(service_context_service.clone())) 26 | .wrap(Logger::default()) 27 | .wrap(ServiceContextMaintenanceCheck) 28 | .service( 29 | web::scope("/todos") 30 | .route("", web::post().to(create_todo_handler)) 31 | .route("", web::get().to(list_todos_handler)) 32 | .route("/{id}", web::get().to(get_todo_handler)) 33 | .route("/{id}", web::delete().to(delete_todo_handler)) 34 | ) 35 | } 36 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/constants.rs: -------------------------------------------------------------------------------- 1 | pub const POSTGRESQL_DB_URI: &str = "DATABASE_URL"; 2 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/error.rs: -------------------------------------------------------------------------------- 1 | use serde::Serialize; 2 | 3 | #[derive(Debug, Serialize)] 4 | pub struct CommonError { 5 | pub message: String, 6 | pub code: u32, 7 | } 8 | 9 | impl std::fmt::Display for CommonError { 10 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 11 | write!(f, "Error: {}, Code: {}", self.message, self.code) 12 | } 13 | } 14 | 15 | #[derive(Debug)] 16 | pub struct ApiError(CommonError); 17 | 18 | impl From for ApiError { 19 | fn from(error: CommonError) -> ApiError { 20 | ApiError(error) 21 | } 22 | } 23 | 24 | impl std::fmt::Display for ApiError { 25 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 26 | write!(f, "{}", self.0) 27 | } 28 | } 29 | 30 | impl actix_web::ResponseError for ApiError { 31 | fn error_response(&self) -> actix_web::HttpResponse { 32 | actix_web::HttpResponse::BadRequest().json(&self.0) 33 | } 34 | } 35 | 36 | #[derive(Debug)] 37 | pub struct RepositoryError { 38 | pub message: String, 39 | } 40 | 41 | impl Into for RepositoryError { 42 | fn into(self) -> CommonError { 43 | 
CommonError { 44 | message: self.message, 45 | code: 1, 46 | } 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod models; 2 | pub mod repositories; 3 | pub mod error; 4 | pub mod services; 5 | pub mod constants; -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/models/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod todo; 2 | pub mod service_context; 3 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/models/service_context.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | #[derive(Clone, Deserialize)] 4 | pub struct ServiceContext { 5 | pub id: i32, 6 | pub maintenance: bool, 7 | } 8 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/models/todo.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | 3 | #[derive(Clone, Deserialize)] 4 | pub struct Todo { 5 | pub id: i32, 6 | pub title: String, 7 | pub description: String, 8 | pub completed: bool, 9 | } 10 | 11 | #[derive(Clone)] 12 | pub struct CreateTodo { 13 | pub title: String, 14 | pub description: String, 15 | } 16 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/repositories/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod todo; 2 | pub mod repository; -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/repositories/repository.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use crate::domain::error::{RepositoryError}; 3 | 4 | pub type RepositoryResult = Result; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct ResultPaging { 8 | pub total: i64, 9 | pub items: Vec, 10 | } 11 | 12 | pub const DEFAULT_OFFSET: Option = Some(0); 13 | pub const DEFAULT_LIMIT: Option = Some(25); 14 | 15 | pub trait QueryParams: Send + Sync { 16 | fn limit(&self) -> i64; 17 | fn offset(&self) -> i64; 18 | } 19 | 20 | #[derive(Debug, Serialize, Deserialize)] 21 | pub struct QueryParamsImpl { 22 | pub limit: Option, 23 | pub offset: Option, 24 | } 25 | 26 | impl QueryParams for QueryParamsImpl { 27 | fn limit(&self) -> i64 { 28 | self.limit.or(DEFAULT_LIMIT).unwrap_or_default() 29 | } 30 | fn offset(&self) -> i64 { 31 | self.offset.or(DEFAULT_OFFSET).unwrap_or_default() 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/repositories/todo.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | use serde::{Deserialize, Serialize}; 3 | use crate::domain::repositories::repository::{QueryParams, ResultPaging, RepositoryResult, DEFAULT_LIMIT, DEFAULT_OFFSET}; 4 | use crate::domain::models::todo::{Todo, CreateTodo}; 5 | 6 | #[derive(Debug, Serialize, Deserialize)] 7 | pub struct TodoQueryParams { 8 | pub limit: Option, 9 | pub offset: Option, 10 | pub 
title: Option, 11 | } 12 | 13 | impl QueryParams for TodoQueryParams { 14 | fn limit(&self) -> i64 { 15 | self.limit.or(DEFAULT_LIMIT).unwrap_or_default() 16 | } 17 | fn offset(&self) -> i64 { 18 | self.offset.or(DEFAULT_OFFSET).unwrap_or_default() 19 | } 20 | } 21 | 22 | #[async_trait] 23 | pub trait TodoRepository: Send + Sync { 24 | async fn create(&self, new_todo: &CreateTodo) -> RepositoryResult; 25 | async fn list(&self, params: TodoQueryParams) -> RepositoryResult>; 26 | async fn get(&self, todo_id: i32) -> RepositoryResult; 27 | async fn delete(&self, todo_id: i32) -> RepositoryResult<()>; 28 | } 29 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/services/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod todo; 2 | pub mod service_context; 3 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/services/service_context.rs: -------------------------------------------------------------------------------- 1 | use crate::domain::models::service_context::ServiceContext; 2 | 3 | pub trait ServiceContextService: 'static + Sync + Send { 4 | fn get_service_context(&self) -> ServiceContext; 5 | fn update(&self, service_context: ServiceContext) -> ServiceContext; 6 | fn is_maintenance_active(&self) -> bool; 7 | } 8 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/domain/services/todo.rs: -------------------------------------------------------------------------------- 1 | use async_trait::async_trait; 2 | 3 | use crate::domain::error::CommonError; 4 | use crate::domain::models::todo::{CreateTodo, Todo}; 5 | use crate::domain::repositories::repository::ResultPaging; 6 | use crate::domain::repositories::todo::TodoQueryParams; 7 | 8 | #[async_trait] 9 | pub trait TodoService: 'static + Sync + Send { 10 | async fn create(&self, todo: CreateTodo) -> Result; 11 | async fn list(&self, params: TodoQueryParams) -> Result, CommonError>; 12 | async fn get(&self, todo_id: i32) -> Result; 13 | async fn delete(&self, todo_id: i32) -> Result<(), CommonError>; 14 | } 15 | 16 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/databases/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod postgresql; 2 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/databases/postgresql.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | 3 | use diesel; 4 | use diesel::pg::PgConnection; 5 | use diesel::r2d2; 6 | use diesel::r2d2::ConnectionManager; 7 | use dotenv::dotenv; 8 | 9 | use crate::domain::constants::POSTGRESQL_DB_URI; 10 | 11 | pub type Pool = r2d2::Pool>; 12 | pub type PostgresPool = Pool; 13 | pub type DBConn = PostgresPool; 14 | 15 | pub fn db_pool() -> DBConn { 16 | dotenv().ok(); 17 | let database_url = env::var(POSTGRESQL_DB_URI) 18 | .expect(&*format!("{value} must be set", value = POSTGRESQL_DB_URI)); 19 | let manager = ConnectionManager::::new(database_url); 20 | Pool::builder() 21 | .build(manager) 22 | .expect("Failed to create pool") 23 | } 24 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/error.rs: 
-------------------------------------------------------------------------------- 1 | use diesel::r2d2; 2 | pub use actix_threadpool::{run, BlockingError}; 3 | use crate::domain::error::RepositoryError; 4 | 5 | pub type AsyncPoolError = BlockingError; 6 | 7 | #[derive(Debug)] 8 | pub struct DieselRepositoryError(RepositoryError); 9 | 10 | impl DieselRepositoryError { 11 | pub fn into_inner(self) -> RepositoryError { 12 | self.0 13 | } 14 | } 15 | 16 | impl From for DieselRepositoryError { 17 | fn from(error: r2d2::Error) -> DieselRepositoryError { 18 | DieselRepositoryError(RepositoryError { 19 | message: error.to_string(), 20 | }) 21 | } 22 | } 23 | 24 | impl From for DieselRepositoryError { 25 | fn from(error: diesel::result::Error) -> DieselRepositoryError { 26 | DieselRepositoryError(RepositoryError { 27 | message: error.to_string(), 28 | }) 29 | } 30 | } 31 | 32 | impl From> for DieselRepositoryError { 33 | fn from(error: AsyncPoolError) -> DieselRepositoryError { 34 | DieselRepositoryError(RepositoryError { 35 | message: error.to_string(), 36 | }) 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod databases; 2 | pub mod models; 3 | pub mod repositories; 4 | pub mod schema; 5 | pub mod error; 6 | pub mod services; 7 | -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/models/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod todo; 2 | pub mod service_context; -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/models/service_context.rs: -------------------------------------------------------------------------------- 1 | use diesel; 2 | use diesel::prelude::*; 3 | use crate::domain::models::service_context::ServiceContext; 4 | use crate::infrastructure::schema::service_contexts; 5 | 6 | 7 | #[derive(Queryable, Insertable, AsChangeset)] 8 | #[diesel(table_name = service_contexts)] 9 | pub struct ServiceContextDiesel { 10 | pub id: i32, 11 | pub maintenance: bool, 12 | } 13 | 14 | impl From for ServiceContext { 15 | fn from(service_context: ServiceContextDiesel) -> Self { 16 | ServiceContext { 17 | id: service_context.id, 18 | maintenance: service_context.maintenance 19 | } 20 | } 21 | } 22 | 23 | impl From for ServiceContextDiesel { 24 | fn from(service_context: ServiceContext) -> Self { 25 | ServiceContextDiesel { 26 | id: service_context.id, 27 | maintenance: service_context.maintenance 28 | } 29 | } 30 | } -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/models/todo.rs: -------------------------------------------------------------------------------- 1 | use diesel; 2 | use diesel::prelude::*; 3 | use crate::domain::models::todo::{CreateTodo, Todo}; 4 | use crate::infrastructure::schema::todos; 5 | 6 | #[derive(Queryable)] 7 | pub struct TodoDiesel { 8 | pub id: i32, 9 | pub title: String, 10 | pub description: String, 11 | pub completed: bool, 12 | } 13 | 14 | // Factory method for creating a new TodoDiesel from a Todo 15 | impl From for TodoDiesel { 16 | fn from(t: Todo) -> Self { 17 | TodoDiesel { 18 | id: t.id, 19 | title: t.title, 20 | description: t.description, 21 | completed: t.completed, 22 | } 23 | } 24 | 
} 25 | 26 | #[derive(Insertable)] 27 | #[diesel(table_name = todos)] 28 | pub struct CreateTodoDiesel { 29 | pub title: String, 30 | pub description: String, 31 | } 32 | 33 | // Factory method for creating a new Todo from a TodoDiesel 34 | impl Into for TodoDiesel { 35 | fn into(self) -> Todo { 36 | Todo { 37 | id: self.id, 38 | title: self.title, 39 | description: self.description, 40 | completed: self.completed, 41 | } 42 | } 43 | } 44 | 45 | impl From for CreateTodoDiesel { 46 | fn from(t: CreateTodo) -> Self { 47 | CreateTodoDiesel { 48 | title: t.title, 49 | description: t.description, 50 | } 51 | } 52 | } 53 | 54 | impl Into for CreateTodoDiesel { 55 | fn into(self) -> Todo { 56 | Todo { 57 | id: 0, 58 | title: self.title, 59 | description: self.description, 60 | completed: false, 61 | } 62 | } 63 | } -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/repositories/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod todo; -------------------------------------------------------------------------------- /{{cookiecutter.repo_name}}/src/infrastructure/repositories/todo.rs: -------------------------------------------------------------------------------- 1 | use std::sync::Arc; 2 | use actix_threadpool::run; 3 | use async_trait::async_trait; 4 | use diesel::prelude::*; 5 | 6 | use crate::domain::models::todo::{CreateTodo, Todo}; 7 | use crate::domain::repositories::repository::{QueryParams, RepositoryResult, ResultPaging}; 8 | use crate::domain::repositories::todo::{TodoQueryParams, TodoRepository}; 9 | use crate::infrastructure::error::DieselRepositoryError; 10 | use crate::infrastructure::databases::postgresql::DBConn; 11 | use crate::infrastructure::models::todo::{CreateTodoDiesel, TodoDiesel}; 12 | 13 | pub struct TodoDieselRepository { 14 | pub pool: Arc 15 | } 16 | 17 | impl TodoDieselRepository { 18 | pub fn new(db: Arc) -> Self { 19 | TodoDieselRepository { pool: db } 20 | } 21 | } 22 | 23 | #[async_trait] 24 | impl TodoRepository for TodoDieselRepository { 25 | 26 | async fn create(&self, new_todo: &CreateTodo) -> RepositoryResult { 27 | use crate::infrastructure::schema::todos::dsl::todos; 28 | let new_todo_diesel: CreateTodoDiesel = CreateTodoDiesel::from(new_todo.clone()); 29 | let mut conn = self.pool.get().unwrap(); 30 | let result: TodoDiesel = run(move || diesel::insert_into(todos).values(new_todo_diesel) 31 | .get_result(&mut conn)) 32 | .await 33 | .map_err(|v| DieselRepositoryError::from(v).into_inner())?; 34 | Ok(result.into()) 35 | } 36 | 37 | async fn list(&self, params: TodoQueryParams) -> RepositoryResult> { 38 | use crate::infrastructure::schema::todos::dsl::todos; 39 | let pool = self.pool.clone(); 40 | let builder = todos.limit(params.limit()).offset(params.offset()); 41 | let result = run(move || { 42 | let mut conn = pool.get().unwrap(); 43 | builder.load::(&mut conn) 44 | }) 45 | .await 46 | .map_err(|v| DieselRepositoryError::from(v).into_inner())?; 47 | Ok(ResultPaging { 48 | total: 0, 49 | items: result.into_iter().map(|v| v.into()).collect() 50 | }) 51 | } 52 | 53 | async fn get(&self, todo_id: i32) -> RepositoryResult { 54 | use crate::infrastructure::schema::todos::dsl::{id, todos}; 55 | let mut conn = self.pool.get().unwrap(); 56 | run(move || todos.filter(id.eq(todo_id)).first::(&mut conn)) 57 | .await 58 | .map_err(|v| DieselRepositoryError::from(v).into_inner()) 59 | .map(|v| -> Todo { v.into() }) 60 | } 61 | 62 | async fn 
62 |     async fn delete(&self, todo_id: i32) -> RepositoryResult<()> {
63 |         use crate::infrastructure::schema::todos::dsl::{id, todos};
64 |         let mut conn = self.pool.get().unwrap();
65 |         run(move || diesel::delete(todos).filter(id.eq(todo_id))
66 |             .execute(&mut conn))
67 |             .await
68 |             .map_err(|v| DieselRepositoryError::from(v).into_inner())?;
69 |         Ok(())
70 |     }
71 | }
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/infrastructure/schema.rs:
--------------------------------------------------------------------------------
1 | // @generated automatically by Diesel CLI.
2 | 
3 | diesel::table! {
4 |     service_contexts (id) {
5 |         id -> Int4,
6 |         maintenance -> Bool,
7 |     }
8 | }
9 | 
10 | diesel::table! {
11 |     todos (id) {
12 |         id -> Int4,
13 |         title -> Varchar,
14 |         description -> Text,
15 |         completed -> Bool,
16 |     }
17 | }
18 | 
19 | diesel::allow_tables_to_appear_in_same_query!(
20 |     service_contexts,
21 |     todos,
22 | );
23 | 
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/infrastructure/services/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod service_context;
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/infrastructure/services/service_context.rs:
--------------------------------------------------------------------------------
1 | use std::sync::Arc;
2 | use diesel::{insert_into, update};
3 | use diesel::prelude::*;
4 | use diesel::result::Error;
5 | use log::{info};
6 | use crate::domain::models::service_context::ServiceContext;
7 | use crate::domain::services::service_context::ServiceContextService;
8 | use crate::infrastructure::databases::postgresql::DBConn;
9 | use crate::infrastructure::models::service_context::ServiceContextDiesel;
10 | 
11 | #[derive(Clone)]
12 | pub struct ServiceContextServiceImpl {
13 |     pub pool: Arc<DBConn>
14 | }
15 | 
16 | impl ServiceContextServiceImpl {
17 |     pub fn new(db: Arc<DBConn>) -> Self {
18 |         ServiceContextServiceImpl {
19 |             pool: db
20 |         }
21 |     }
22 | 
23 |     fn get_service_context(&self) -> ServiceContext {
24 |         use crate::infrastructure::schema::service_contexts::dsl::{id, service_contexts};
25 |         let mut conn = self.pool.get().unwrap();
26 |         let result: Result<ServiceContextDiesel, Error> = service_contexts.filter(id.eq(1)).first::<ServiceContextDiesel>(&mut conn);
27 | 
28 |         if result.is_err() {
29 |             info!("Service context does not exist, creating a service context...");
30 |             return self.create_service_context();
31 |         }
32 | 
33 |         result.unwrap().into()
34 |     }
35 | 
36 |     fn create_service_context(&self) -> ServiceContext {
37 |         use crate::infrastructure::schema::service_contexts::dsl::service_contexts;
38 |         let mut conn = self.pool.get().unwrap();
39 |         let result: Result<ServiceContextDiesel, Error> = insert_into(service_contexts).values(ServiceContextDiesel { id: 1, maintenance: false }).get_result(&mut conn);
40 | 
41 |         if result.is_err() {
42 |             panic!("Could not create service context");
43 |         }
44 |         result.unwrap().into()
45 |     }
46 | }
47 | 
48 | impl ServiceContextService for ServiceContextServiceImpl {
49 |     fn get_service_context(&self) -> ServiceContext {
50 |         self.get_service_context()
51 |     }
52 | 
53 |     fn update(&self, service_context: ServiceContext) -> ServiceContext {
54 |         let service_context_diesel: ServiceContextDiesel = ServiceContextDiesel::from(service_context);
55 |         let mut conn = self.pool.get().unwrap();
56 |         use crate::infrastructure::schema::service_contexts::dsl::{service_contexts, id};
57 |         let result: Result<ServiceContextDiesel, Error> = update(service_contexts)
58 |             .filter(id.eq(1)).set(service_context_diesel).get_result(&mut conn);
59 | 
60 |         if result.is_err() {
61 |             panic!("Could not update service context");
62 |         }
63 |         result.unwrap().into()
64 |     }
65 | 
66 |     fn is_maintenance_active(&self) -> bool {
67 |         self.get_service_context().maintenance
68 |     }
69 | }
70 | 
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/lib.rs:
--------------------------------------------------------------------------------
1 | extern crate core;
2 | 
3 | pub mod domain;
4 | pub mod container;
5 | pub mod services;
6 | pub mod infrastructure;
7 | pub mod api;
8 | pub mod create_app;
9 | 
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/main.rs:
--------------------------------------------------------------------------------
1 | use std::sync::Arc;
2 | use actix_web::{HttpServer};
3 | use actix_clean_architecture::{container::Container, create_app::create_app};
4 | 
5 | 
6 | #[cfg(test)]
7 | mod tests;
8 | 
9 | #[actix_web::main]
10 | async fn main() -> std::io::Result<()> {
11 |     let container = Arc::new(Container::new());
12 |     let server = HttpServer::new(move || { create_app(container.clone()) })
13 |         .bind(("127.0.0.1", 8080))?;
14 |     server.run().await
15 | }
16 | 
17 | 
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/services/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod todo;
2 | 
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/services/todo.rs:
--------------------------------------------------------------------------------
1 | use std::sync::Arc;
2 | 
3 | use async_trait::async_trait;
4 | 
5 | use crate::domain::error::CommonError;
6 | use crate::domain::models::todo::{CreateTodo, Todo};
7 | use crate::domain::repositories::repository::ResultPaging;
8 | use crate::domain::repositories::todo::{TodoQueryParams, TodoRepository};
9 | use crate::domain::services::todo::TodoService;
10 | 
11 | #[derive(Clone)]
12 | pub struct TodoServiceImpl {
13 |     pub repository: Arc<dyn TodoRepository>,
14 | }
15 | 
16 | impl TodoServiceImpl {
17 |     pub fn new(repository: Arc<dyn TodoRepository>) -> Self {
18 |         TodoServiceImpl {
19 |             repository,
20 |         }
21 |     }
22 | }
23 | 
24 | #[async_trait]
25 | impl TodoService for TodoServiceImpl {
26 |     async fn create(&self, todo: CreateTodo) -> Result<Todo, CommonError> {
27 |         let mut cloned = todo.clone();
28 |         self.repository
29 |             .create(&mut cloned)
30 |             .await
31 |             .map_err(|e| -> CommonError { e.into() })
32 |     }
33 | 
34 |     async fn list(&self, params: TodoQueryParams) -> Result<ResultPaging<Todo>, CommonError> {
35 |         self.repository
36 |             .list(params)
37 |             .await
38 |             .map_err(|e| -> CommonError { e.into() })
39 |     }
40 | 
41 |     async fn get(&self, todo_id: i32) -> Result<Todo, CommonError> {
42 |         self.repository
43 |             .get(todo_id)
44 |             .await
45 |             .map_err(|e| -> CommonError { e.into() })
46 |     }
47 | 
48 |     async fn delete(&self, todo_id: i32) -> Result<(), CommonError> {
49 |         self.repository
50 |             .delete(todo_id)
51 |             .await
52 |             .map_err(|e| -> CommonError { e.into() })
53 |     }
54 | }
55 | 
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/tests/api/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod test_todo_controllers;
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/tests/api/test_service_context_controller.rs:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/cookiecutter-rust-actix-clean-architecture/9cedb0aa7b3a5b8527bd5f7caa45d2ed65b2addd/{{cookiecutter.repo_name}}/src/tests/api/test_service_context_controller.rs
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/tests/api/test_todo_controllers.rs:
--------------------------------------------------------------------------------
1 | #[cfg(test)]
2 | mod test_todo_controllers {
3 |     use std::env;
4 |     use std::sync::Arc;
5 |     use actix_web::{test};
6 |     use testcontainers::clients;
7 |     use serde_json;
8 |     use testcontainers::images::postgres;
9 |     use actix_clean_architecture::domain::constants::POSTGRESQL_DB_URI;
10 |     use actix_clean_architecture::infrastructure::databases::postgresql::db_pool;
11 |     use diesel_migrations::{embed_migrations, EmbeddedMigrations, MigrationHarness};
12 |     use serde_json::json;
13 |     use actix_clean_architecture::{container::Container, create_app::create_app};
14 |     use actix_clean_architecture::domain::models::todo::Todo;
15 |     use actix_clean_architecture::domain::repositories::repository::ResultPaging;
16 | 
17 |     pub const MIGRATIONS: EmbeddedMigrations = embed_migrations!("./migrations");
18 | 
19 |     #[actix_web::test]
20 |     async fn test() {
21 |         env::set_var("RUST_BACKTRACE", "1");
22 |         env::set_var("RUST_LOG", "debug");
23 |         env::set_var("RUST_BACKTRACE", "1");
24 |         env_logger::init();
25 | 
26 |         let docker = clients::Cli::default();
27 |         let postgres_node = docker.run(postgres::Postgres::default());
28 |         let connection_string = &format!(
29 |             "postgres://postgres:postgres@127.0.0.1:{}/postgres", postgres_node.get_host_port_ipv4(5432)
30 |         );
31 | 
32 |         env::set_var(POSTGRESQL_DB_URI, connection_string);
33 | 
34 |         {
35 |             let pool = Arc::new(db_pool());
36 |             pool.get().unwrap().run_pending_migrations(MIGRATIONS).unwrap();
37 |         }
38 | 
39 |         let container = Arc::new(Container::new());
40 | 
41 |         let app = test::init_service(create_app(container)).await;
42 |         let request_body = json!({
43 |             "title": "test todo",
44 |             "description": "Test description"
45 |         });
46 | 
47 |         // Creation test
48 |         let resp = test::TestRequest::post().uri("/todos").set_json(&request_body).send_request(&app).await;
49 |         assert!(resp.status().is_success());
50 |         let todo: Todo = test::read_body_json(resp).await;
51 |         assert_eq!(todo.title, "test todo");
52 |         assert_eq!(todo.description, "Test description");
53 | 
54 |         // Get by id test
55 |         let resp = test::TestRequest::get().uri(&format!("/todos/{}", todo.id)).send_request(&app).await;
56 |         assert!(resp.status().is_success());
57 |         let retrieved_todo: Todo = test::read_body_json(resp).await;
58 |         assert_eq!(todo.id, retrieved_todo.id);
59 |         assert_eq!(todo.title, retrieved_todo.title);
60 | 
61 |         // Second creation test
62 |         let resp = test::TestRequest::post().uri("/todos").set_json(&request_body).send_request(&app).await;
63 |         assert!(resp.status().is_success());
64 | 
65 |         // Get all test
66 |         let req = test::TestRequest::get().uri("/todos").to_request();
67 |         let resp = test::call_service(&app, req).await;
68 |         assert!(resp.status().is_success());
69 |         let todos: ResultPaging<Todo> = test::read_body_json(resp).await;
70 |         assert_eq!(todos.items.len(), 2);
71 | 
72 |         // Delete test
73 |         let resp = test::TestRequest::delete().uri(&format!("/todos/{}", todo.id)).send_request(&app).await;
74 |         assert!(resp.status().is_success());
75 | 
76 |         // Get all after delete test
77 |         let req = test::TestRequest::get().uri("/todos").to_request();
78 |         let resp = test::call_service(&app, req).await;
79 |         assert!(resp.status().is_success());
80 |         let todos: ResultPaging<Todo> = test::read_body_json(resp).await;
81 |         assert_eq!(todos.items.len(), 1);
82 |     }
83 | }
84 | 
--------------------------------------------------------------------------------
/{{cookiecutter.repo_name}}/src/tests/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod api;
--------------------------------------------------------------------------------