├── .cargo └── config.toml ├── .github └── workflows │ ├── on_pull_request_generate_coverage_report.yml │ ├── on_push.yml │ ├── on_push_apply_clippy.yml │ ├── on_release_created.yml │ └── on_tag.yml ├── .gitignore ├── .gitjournal.toml ├── .travis.yml.unused ├── Cargo.lock ├── Cargo.toml ├── README.md ├── samples ├── TheBrothersBrick └── stackoverflow ├── src ├── config.rs ├── export.rs ├── feed.rs ├── feed_errors.rs ├── feed_reader.rs ├── feed_utils.rs ├── image_to_data.rs ├── import.rs ├── main.rs ├── message.css ├── message.rs ├── settings.rs ├── store.rs └── syndication.rs └── tests └── unit ├── example.atom ├── example.rss ├── feed.rs ├── feed_utils ├── can_sanitize_email.rs ├── can_sanitize_message_authors.rs └── can_trim_to_chars.rs ├── image_to_data.rs ├── store.rs └── store ├── bugfix_82_export_is_broken.json └── simple_config_store.json /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [target.armv7-unknown-linux-gnueabihf] 2 | linker="arm-linux-gnueabihf-gcc" 3 | [target.armv7-unknown-linux-gnueabi] 4 | linker="arm-linux-gnueabi-gcc" 5 | -------------------------------------------------------------------------------- /.github/workflows/on_pull_request_generate_coverage_report.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: 3 | name: Run coverage report using tarpaulin and generate cobertura-like report 4 | jobs: 5 | test: 6 | name: coverage 7 | runs-on: ubuntu-latest 8 | container: 9 | image: xd009642/tarpaulin:develop-nightly 10 | options: --security-opt seccomp=unconfined 11 | steps: 12 | - name: Checkout repository 13 | uses: actions/checkout@v2 14 | 15 | - name: Generate code coverage 16 | run: | 17 | cargo +nightly tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml 18 | - name: Use coverage report 19 | uses: 5monkeys/cobertura-action@master 20 | with: 21 | path: cobertura.xml 22 | minimum_coverage: 10 23 | 
-------------------------------------------------------------------------------- /.github/workflows/on_push.yml: -------------------------------------------------------------------------------- 1 | name: build Rust on push 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | pull_request: 8 | branches: 9 | - master 10 | 11 | jobs: 12 | Run_cargo_tests: 13 | name: Test rrss2imap 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@master 17 | # see https://github.com/marketplace/actions/rust-cargo 18 | # - uses: actions-rs/cargo@v1 19 | # with: 20 | # command: test 21 | # args: --all-features 22 | 23 | Standard_OS_build: 24 | 25 | name: Build ${{ matrix.config.name }} 26 | runs-on: ${{ matrix.config.os }} 27 | strategy: 28 | matrix: 29 | config: 30 | # See https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners 31 | - { os: ubuntu-latest, name: rrss2imap_linux, path: target/debug/rrss2imap} 32 | - { os: macOS-latest, name: rrss2imap_macOS, path: target/debug/rrss2imap} 33 | - { os: windows-latest, name: rrss2imap.exe, path: target/debug/rrss2imap.exe} 34 | # And this one is the dreaded Raspbian one ... 
35 | - { os: ubuntu-latest, name: rrss2imap_raspbian, path: target/armv7-unknown-linux-gnueabihf/debug/rrss2imap, target: armv7-unknown-linux-gnueabihf, linker: gcc-arm-linux-gnueabihf} 36 | steps: 37 | - name: Install linker 38 | run: sudo apt-get update && sudo apt-get install ${{matrix.config.linker}} 39 | if: matrix.config.linker!=null 40 | - uses: actions-rs/toolchain@v1.0.6 41 | with: 42 | toolchain: stable 43 | target: ${{matrix.config.target}} 44 | override: true 45 | if: matrix.config.target!=null 46 | - uses: actions-rs/toolchain@v1.0.6 47 | with: 48 | toolchain: stable 49 | if: matrix.config.target==null 50 | - uses: actions/checkout@master 51 | # see https://github.com/marketplace/actions/rust-cargo 52 | - uses: actions-rs/cargo@v1.0.1 53 | with: 54 | command: build 55 | # temp 56 | args: --all-features 57 | if: matrix.config.target==null 58 | - uses: actions-rs/cargo@v1.0.1 59 | with: 60 | use-cross: true 61 | command: build 62 | args: --all-features --target ${{matrix.config.target}} 63 | if: matrix.config.target!=null 64 | - name: Upload build result for OS 65 | uses: actions/upload-artifact@v1 66 | with: 67 | name: ${{matrix.config.name}} 68 | path: ${{matrix.config.path}} 69 | needs: Run_cargo_tests 70 | -------------------------------------------------------------------------------- /.github/workflows/on_push_apply_clippy.yml: -------------------------------------------------------------------------------- 1 | on: 2 | push: 3 | branches: 4 | - master 5 | name: Apply clippy and PR changes 6 | jobs: 7 | clippy_apply: 8 | runs-on: ubuntu-latest 9 | steps: 10 | - uses: actions/checkout@v2 11 | - uses: actions-rs/toolchain@v1 12 | with: 13 | toolchain: nightly 14 | components: clippy 15 | override: true 16 | - run: rustup component add clippy 17 | - run: cargo clippy --fix -Z unstable-options 18 | - name: Create Pull Request 19 | uses: peter-evans/create-pull-request@v3 20 | with: 21 | token: ${{ secrets.GITHUB_TOKEN }} 22 | branch-suffix: timestamp 23 
| commit-message: "style(lint): automatically applied clippy lint" 24 | body: Automated changes from clippy 25 | title: "Automatic lint from clippy" 26 | 27 | -------------------------------------------------------------------------------- /.github/workflows/on_release_created.yml: -------------------------------------------------------------------------------- 1 | name: Upload all artifacts to release 2 | 3 | on: 4 | release: 5 | types: 6 | - created 7 | - published 8 | jobs: 9 | Standard_OS_build: 10 | 11 | name: Build ${{ matrix.config.name }} 12 | runs-on: ${{ matrix.config.os }} 13 | strategy: 14 | matrix: 15 | config: 16 | # See https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners 17 | - { os: ubuntu-latest, name: rrss2imap_linux, path: target/release/rrss2imap} 18 | - { os: macOS-latest, name: rrss2imap_macOS, path: target/release/rrss2imap} 19 | - { os: windows-latest, name: rrss2imap.exe, path: target/release/rrss2imap.exe} 20 | # And this one is the dreaded Raspbian one ... 
21 | - { os: ubuntu-latest, name: rrss2imap_raspbian, path: target/armv7-unknown-linux-gnueabihf/release/rrss2imap, target: armv7-unknown-linux-gnueabihf, linker: gcc-arm-linux-gnueabihf} 22 | steps: 23 | - name: Install linker 24 | run: sudo apt-get update && sudo apt-get install ${{matrix.config.linker}} 25 | if: matrix.config.linker!=null 26 | - uses: actions-rs/toolchain@v1.0.6 27 | with: 28 | toolchain: stable 29 | target: ${{matrix.config.target}} 30 | override: true 31 | if: matrix.config.target!=null 32 | - uses: actions-rs/toolchain@v1.0.6 33 | with: 34 | toolchain: stable 35 | if: matrix.config.target==null 36 | - uses: actions/checkout@master 37 | # see https://github.com/marketplace/actions/rust-cargo 38 | - uses: actions-rs/cargo@v1.0.1 39 | with: 40 | command: build 41 | args: --release --all-features 42 | if: matrix.config.target==null 43 | - uses: actions-rs/cargo@v1.0.1 44 | with: 45 | use-cross: true 46 | command: build 47 | args: --release --all-features --target ${{matrix.config.target}} 48 | if: matrix.config.target!=null 49 | - name: Upload matrix release asset 50 | uses: actions/upload-release-asset@v1.0.2 51 | env: 52 | GITHUB_TOKEN: ${{ secrets.RELEASE_SECRET }} 53 | with: 54 | upload_url: ${{ github.event.release.upload_url }} 55 | asset_name: ${{matrix.config.name}} 56 | asset_path: ${{matrix.config.path}} 57 | asset_content_type: application/octet-stream 58 | -------------------------------------------------------------------------------- /.github/workflows/on_tag.yml: -------------------------------------------------------------------------------- 1 | name: Create release on tag 2 | 3 | on: 4 | push: 5 | tags: 6 | - '[0-9]+.[0-9]+.[0-9]+' 7 | - 'v[0-9]+.[0-9]+.[0-9]+' 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v3 14 | - name: Create a Release 15 | uses: softprops/action-gh-release@v1 16 | with: 17 | draft: true 18 | token: ${{ secrets.RELEASE_SECRET }} 19 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .project 2 | .vscode 3 | /target 4 | **/*.rs.bk 5 | config.json 6 | test-results/ 7 | cobertura.xml -------------------------------------------------------------------------------- /.gitjournal.toml: -------------------------------------------------------------------------------- 1 | categories = ["Added", "Changed", "Fixed", "Improved", "Removed"] 2 | category_delimiters = ["[", "]"] 3 | colored_output = true 4 | enable_debug = true 5 | excluded_commit_tags = [] 6 | enable_footers = false 7 | show_commit_hash = false 8 | show_prefix = false 9 | sort_by = "date" 10 | template_prefix = "" 11 | -------------------------------------------------------------------------------- /.travis.yml.unused: -------------------------------------------------------------------------------- 1 | services: 2 | - docker 3 | language: rust 4 | rust: 5 | - stable 6 | cache: cargo 7 | 8 | # All Rust build architectures are defined here 9 | matrix: 10 | include: 11 | - env: DEBUG=debug CROSS=cross TARGET=x86_64-unknown-linux-gnu 12 | os: linux 13 | - env: DEBUG=debug TARGET=i686-apple-darwin 14 | os: osx 15 | osx_image: xcode10 16 | - env: DEBUG=debug TARGET=x86_64-apple-darwin 17 | os: osx 18 | osx_image: xcode10 19 | - env: TARGET=x86_64-pc-windows-msvc 20 | os: windows 21 | - env: DEBUG=debug CROSS=cross TARGET=armv7-unknown-linux-gnueabihf 22 | os: linux 23 | addons: 24 | apt: 25 | packages: 26 | - gcc-arm-linux-gnueabihf 27 | 28 | # part shamelessly borrowed from https://github.com/Enet4/nifti-rs/blob/438538bfffa2347ece5a09c2a37c0c407ec6fbee/.travis.yml 29 | before_script: 30 | - export PATH="$PATH:$HOME/.cargo/bin" 31 | - rustup target add $TARGET || true 32 | - if [ ! 
-z "$CROSS" ]; then 33 | cargo install cross --force; 34 | export CARGO_CMD="cross"; 35 | else 36 | export CARGO_CMD=cargo; 37 | fi 38 | 39 | # This is the script that will be run on each matrix element 40 | script: 41 | - | 42 | if [ $TARGET = "x86_64-unknown-linux-gnu" ]; then 43 | echo "Running on $TARGET, so running tests!" 44 | cargo test 45 | fi 46 | - if [ ! -z "$DEBUG" ]; then 47 | $CARGO_CMD build --target $TARGET --verbose 48 | fi 49 | - $CARGO_CMD build --target $TARGET --verbose --release 50 | - mkdir -p target/executable 51 | - ls -la target/${TARGET} 52 | - cp target/${TARGET}/debug/rrss2imap target/executable/rrss2imap-${TARGET}-debug 53 | - cp target/${TARGET}/release/rrss2imap target/executable/rrss2imap-${TARGET} 54 | - ls -la target/executable 55 | 56 | # Once the Rust packages are built, here they are deployed 57 | deploy: 58 | provider: releases 59 | api_key: ${GITHUB_OAUTH} 60 | file_glob: true 61 | file: target/executable/* 62 | skip_cleanup: true 63 | overwrite: true 64 | # This way, the release is not directly visible 65 | draft: true 66 | verbose: true 67 | # Release name on body 68 | name: "$TRAVIS_TAG" 69 | # Body is created by git journal ! 
70 | # body: "$JOURNAL" 71 | on: 72 | repo: Riduidel/rrss2imap 73 | tags: true 74 | 75 | branches: 76 | except: 77 | - "/^untagged/" 78 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "rrss2imap" 3 | version = "0.5.2" 4 | authors = ["Nicolas Delsaux "] 5 | description = "A simple script that exposes RSS entries as mail messages, pushed directly using IMAP" 6 | homepage = "https://github.com/Riduidel/rrss2imap" 7 | repository = "https://github.com/Riduidel/rrss2imap" 8 | readme = "README.md" 9 | keywords = ["RSS", "Atom", "IMAP", "command-line", "script"] 10 | categories = ["command-line-utilities", "email"] 11 | license = "GPL-3.0-or-later" 12 | edition = "2018" 13 | # This is only valid for the archive available in crates.io, not for the generated executable 14 | include = [ "templates/*", "src/**/*", "Cargo.toml" ] 15 | # This allows renaming of tag name to be consistent with already long history of rrss2imap versions 16 | 17 | [profile.release] 18 | opt-level = "z" # Optimize for size. 
19 | lto = true 20 | 21 | [badges] 22 | travis-ci = { repository = "Riduidel/rrss2imap", branch = "master" } 23 | is-it-maintained-issue-resolution = { repository = "riduidel/rrss2imap" } 24 | is-it-maintained-open-issues = { repository = "riduidel/rrss2imap" } 25 | maintenance = { status = "actively-developed" } 26 | 27 | [dependencies] 28 | # logging interface for Rust 29 | log = "0.4" 30 | # chosen logger implementation allowing easy configuration 31 | flexi_logger = "0.25" 32 | # Used for parsing command line args 33 | structopt = "0.3" 34 | # Used for reading/writing config file 35 | serde = "1.0" 36 | # macro implementation for serde easy usage 37 | serde_derive = "1.0" 38 | # allow reading and writing to json 39 | serde_json = "1.0" 40 | # Used for import/export TODO replace by quick-xml 41 | treexml = "0.7" 42 | # Used to get feed entries (and images, when it will be possible) 43 | atom_syndication = "0.12" 44 | rss = "2.0" 45 | # time handling 46 | chrono = { version = "0.4", features = ["serde"] } 47 | # Fixing poorly formatted dates ! 
48 | rfc822_sanitizer = "0.3" 49 | # And an imap connector, obviously 50 | imap = "2.3" 51 | native-tls = "0.2" 52 | # Allows to easily start tera 53 | lazy_static = "1.4" 54 | lol_html = "1.0" 55 | base64 = "0.21" 56 | # A lightweight http client (with no default support for async/await) 57 | ureq = {version = "2.6", features = ["native-tls", "native-certs"]} 58 | xhtmlchardet = "2.1" 59 | human-panic = "1.0" 60 | url = "2.1" 61 | tree_magic_mini = "3.0" 62 | mail-builder = "0.3" 63 | unidecode = "0.3" 64 | regex = "1.5" 65 | custom_error = "1.8" 66 | directories = "5.0" 67 | tests_bin = "1.0" 68 | rayon = "1.7" 69 | 70 | [dev-dependencies] 71 | assert_cli = "0.6" 72 | spectral = "0.6" 73 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # rrss2imap 2 | 3 | [![Built with cargo-make](https://sagiegurari.github.io/cargo-make/assets/badges/cargo-make.svg)](https://sagiegurari.github.io/cargo-make) 4 | [![Build Status](https://travis-ci.org/Riduidel/rrss2imap.svg?branch=master)](https://travis-ci.org/Riduidel/rrss2imap) 5 | 6 | rrss2imap is a Rust reimplementation of the classical Python script [rss2imap](https://github.com/rcarmo/rss2imap) 7 | 8 | Goals of this project include 9 | 10 | * ✅ Having a reasonably performant implementation of rss2imap (by performant I mean able to run without problem on my Raspberry) 11 | * ✅ Learn Rust 12 | * ✅ Explore parallel mechanism (thanks Rayon) 13 | * ✅ Maybe provide some kind of image embedding (DONE) 14 | 15 | ## Getting Started 16 | 17 | ### Download rrss2imap 18 | 19 | rrss2imap can be downloaded from [**releases page**](https://github.com/Riduidel/rrss2imap/releases). 20 | If there is no release for your platform, you can fill an issue ... or if you know Travis, you can even add your platform to `.travis.yml`. 
21 | 22 | ### As a user 23 | 24 | 25 | 26 | Application transforming rss feeds into email by directly pushing the entries into IMAP folders. 27 | This application is an adaptation of the rss2imap Python script to Rust. 28 | 29 | #### How to use ? 30 | 31 | The simplest way to understand what to do is just to run `rrss2imap --help` 32 | 33 | It should output something like 34 | 35 | FLAGS: 36 | -h, --help Prints help information 37 | -V, --version Prints version information 38 | 39 | SUBCOMMANDS: 40 | add Adds a new feed given its url 41 | delete Delete the given feed 42 | email Changes email address used in feed file to be the given one 43 | export Export subscriptions as opml file 44 | help Prints this message or the help of the given subcommand(s) 45 | import import the given opml file into subscriptions 46 | list List all feeds configured 47 | new Creates a new feedfile with the given email address 48 | reset Reset feedfile (in other words, remove everything) 49 | run Run feed parsing and transformation 50 | 51 | Which gives you a glimpse of what will happen 52 | 53 | Each of these commands also provides some help, when run with the same `--help` flag. 54 | 55 | The important operations to memorize are obviously 56 | 57 | #### `rrss2imap new` 58 | 59 | Creates a new `config.json` file. At init time, the config file will only contain a `settings` element 60 | with the email address set.
You **have** to set 61 | 62 | * the used imap server 63 | ** with user login and password 64 | ** and security settings (secure should contain `{"Yes": secure port}` for imap/s 65 | or `{"No": unsecure port}` for simple imap) 66 | * the default config 67 | ** folder will be the full path to an imap folder where entries will fall in 68 | ** email will be the recipient email address (which may not be yours for easier filtering) 69 | ** Base64 image inlining 70 | * feeds is the list of all rss feeds that can be added 71 | 72 | #### `rrss2imap add` 73 | 74 | This command will add a new feed to your config. You can directly set here the email recipient as well as the folder 75 | (but not the base64 image inlining parameter) 76 | 77 | #### `rrss2imap run` 78 | 79 | This is the main command. It will 80 | 81 | 1. get all rss/atom feed contents 82 | 2. List all new entries in these feeds 83 | 3. Transform these entries into valid email messages 84 | 4. Push these mail messages directly on IMAP server 85 | 86 | #### `rrss2imap list` 87 | 88 | Displays a list of the rss feeds.
Here is an example 89 | 90 | ``` 91 | 0 : http://tontof.net/?rss (to: Nicolas Delsaux (default)) RSS/rrss2imap (default) 92 | 1 : https://www.brothers-brick.com/feed/ (to: Nicolas Delsaux (default)) RSS/rrss2imap (default) 93 | 2 : https://nicolas-delsaux.hd.free.fr/rss-bridge/?action=display&bridge=LesJoiesDuCode&format=AtomFormat (to: Nicolas Delsaux (default)) RSS/rrss2imap (default) 94 | ``` 95 | 96 | Please notice that each entry has an associated number, which is the one to enter when running `rrss2imap delete ` 97 | 98 | #### `config.json` format 99 | 100 | A typical feedfile will look like this 101 | 102 | ```json 103 | { 104 | "settings": { 105 | "email": { 106 | "server": "the imap server of your mail provider", 107 | "user": "your imap user name", 108 | "password": "your imap user password", 109 | "secure": { 110 | "Yes": 993 // Set to "Yes": port for imaps or "No": port for unsecure imap 111 | } 112 | }, 113 | // This config is to be used for all feeds 114 | "config": { 115 | // This is the email address written in each mail sent. It can be different from the email user 116 | "email": "Nicolas Delsaux ", 117 | // This is the imap folder in which mails will be written 118 | "folder": "RSS/rrss2imap" 119 | // Setting this to true will force rrss2imap to transform all images into 120 | // base64. 
This prevents images from beind downloaded (and is really cool when reading feeds from a smartphone) 121 | // But largely increase each mail size (which can be quite bothering) 122 | "inline_image_as_data": true 123 | } 124 | }, 125 | "feeds": [ 126 | { 127 | "url": "http://tontof.net/?rss", 128 | // This last updated is updated for each entry and should be enough to have rss items correctly read 129 | "last_updated": "2019-05-04T16:53:15", 130 | "config": { 131 | // each config element can be overwritten at the feed level 132 | } 133 | }, 134 | ``` 135 | 136 | 137 | 138 | 139 | ### As a developer 140 | * clone this repository 141 | * run `cargo run` 142 | 143 | #### Prerequisites 144 | 145 | You need a complete rust build chain 146 | 147 | To perform a release, you'll also need 148 | 149 | * [cargo release](https://github.com/sunng87/cargo-release) 150 | * [git journal](https://github.com/saschagrunert/git-journal) 151 | 152 | ##### Releasing 153 | 154 | 1. Install cargo release (`cargo install cargo-release`) and git-journal (`cargo install git-journal`) 155 | 1. Run `cargo release`. This will build a version of the code, push it onto crates/io and tag the repository. 156 | Thanks to GitHub Actions (and more specifically the `on_tag.yml` one), once the tag is pushed to GitHub, a release is created. 157 | 1. Publish the release. This will trigger the `on_release_created.yml` which will build executables for the target platforms and attach them to the release. 158 | 159 | And release is done! It was easy, no? 160 | 161 | #### Installing 162 | 163 | 1. Dowload latest version from [Github releases page](https://github.com/Riduidel/rrss2imap/releases) 164 | 1. Run `rrss2imap new` which will create the `config.json` 165 | 1. Fill the missing parts (typically include email configuration) 166 | 1. Run with `rrss2imap run` 167 | 168 | ### Running the tests 169 | 170 | Automated tests can be run with `cargo test`. 
171 | Coverage is done thanks to [tarpaulin](https://github.com/xd009642/tarpaulin). 172 | Coverage is also computed during pull requests runs 173 | 174 | ## Built With 175 | 176 | Take a look at Cargo dependencies 177 | 178 | ## Contributing 179 | 180 | Please read [CONTRIBUTING.md](https://gist.github.com/PurpleBooth/b24679402957c63ec426) for details on our code of conduct, and the process for submitting pull requests to us. 181 | 182 | ## Versioning 183 | 184 | We use [SemVer](http://semver.org/) for versioning. For the versions available, see the [tags on this repository](https://github.com/your/project/tags). 185 | 186 | ## Authors 187 | 188 | * **Nicolas Delsaux** - *Initial work* - [Riduidel](https://github.com/Riduidel) 189 | 190 | See also the list of [contributors](https://github.com/Riduidel/rrss2imap/contributors) who participated in this project. 191 | 192 | ## License 193 | 194 | This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details 195 | 196 | ## Acknowledgments 197 | 198 | * [Rui Carmo](https://github.com/rcarmo) for Python implementation of [rss2imap](https://github.com/rcarmo/rss2imap) 199 | * [Aaron Swartz](https://en.wikipedia.org/wiki/Aaron_Swartz) for [RSS](https://en.wikipedia.org/wiki/RSS) (and [rss2email](https://github.com/rss2email/rss2email)) 200 | 201 | -------------------------------------------------------------------------------- /samples/TheBrothersBrick: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | The Brothers Brick 6 | 7 | https://www.brothers-brick.com 8 | World's No. 1 source for LEGO news, reviews, and fan creations. 
9 | Fri, 08 Feb 2019 14:00:19 +0000 10 | en-US 11 | hourly 12 | 1 13 | https://wordpress.org/?v=5.0.3 14 | 40578819 Subscribe with My Yahoo!Subscribe with NewsGatorSubscribe with My AOLSubscribe with BloglinesSubscribe with NetvibesSubscribe with GoogleSubscribe with Pageflakes 15 | Cadet Thrawn outwits his opponents in the metallurgy lab 16 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/eWF3-ZnktaM/ 17 | https://www.brothers-brick.com/2019/02/08/cadet-thrawn-outwits-his-opponents-in-the-metallurgy-lab/#respond 18 | Fri, 08 Feb 2019 14:00:19 +0000 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | https://www.brothers-brick.com/?p=171427 28 | 29 | While many stories and characters passed into so-called “Legends” status when Disney acquired Lucasfilm, some fan-favorites have been incorporated into the new canon, and I can’t think of any character more deserving than Mitth’raw’nuruodo — or Thrawn, as he is more commonly known in Galactic Basic. The Chiss Grand Admiral had a more humble introduction to Imperial life as a cadet at the military academy on Coruscant in the first book of Timothy Zahn‘s new trilogy. In this jam-packed scene by CRCT Productions built as a RebelLUG collaboration, there are so many great details, not the least of which is the light gray angled walkway bordered by the two-color spring-loaded shooter brick.

30 |

Metallurgy Lab Trick I A RebelLUG LEGO Collaboration MOC

31 |

32 |

Every piece of machinery, from the fabrication unit on the back wall, to the robot arm on the left, looks fully functional, with specific purposes. The lighting is also a very nice touch, giving the scene the stark sterile feel that any Imperial facility deserves. I don’t know if those tools on the back counter have been properly stowed, though I think the approaching officer has other things on his mind, like why these cadets are engaged in gambling activities, which are against regulations.

33 |

Metallurgy Lab Trick I A RebelLUG LEGO Collaboration MOC

34 |

The fabrication unit is worth a closer look, with a bank of computers and a robot arm to create — well, whatever is supposed to get created in a Star Wars metallurgy lab.

35 |

Metallurgy Lab Trick I A RebelLUG LEGO Collaboration MOC

36 | ]]>
37 | https://www.brothers-brick.com/2019/02/08/cadet-thrawn-outwits-his-opponents-in-the-metallurgy-lab/feed/ 38 | 0 39 | 171427 https://www.brothers-brick.com/2019/02/08/cadet-thrawn-outwits-his-opponents-in-the-metallurgy-lab/
40 | 41 | The LEGO Movie 2’s sewer babies just got bigger 42 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/EcPUFrbS1xA/ 43 | https://www.brothers-brick.com/2019/02/08/the-lego-movie-2s-sewer-babies-just-got-bigger/#respond 44 | Fri, 08 Feb 2019 08:00:13 +0000 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | https://www.brothers-brick.com/?p=171454 55 | 56 | The LEGO Movie 2: The Second Part officially opens today (be sure to read our LEGO Movie 2 review), and to mark the occasion, “Big Daddy” Nelson has taken a few of the movie’s smallest characters and given them a huge makeover. Built in the style of the classic 3723 Creator Minifigure set, these giant sewer babies look just like their miniature counterparts from the TLM2 Accessory Set.

57 |

02

58 |

They have even more range than the toddler elements they’re based on, featuring double-sided heads and movable hands. They’re also more complex than you might think, with some clever mosaic work needed to translate the prints on the torsos and heads into bricks.

59 |

01

60 | ]]>
61 | https://www.brothers-brick.com/2019/02/08/the-lego-movie-2s-sewer-babies-just-got-bigger/feed/ 62 | 0 63 | 171454 https://www.brothers-brick.com/2019/02/08/the-lego-movie-2s-sewer-babies-just-got-bigger/
64 | 65 | Color and light blend beautifully in this Medieval city 66 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/SjjzU5g6xHY/ 67 | https://www.brothers-brick.com/2019/02/07/color-and-light-blend-beautifully-in-this-medieval-city/#comments 68 | Fri, 08 Feb 2019 02:00:15 +0000 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | https://www.brothers-brick.com/?p=171016 78 | 79 | Fantasy castle building often leans towards the creation of dark, gloomy and foreboding places in which one would not lightly tread. Master castle builder Jonas Wide usually takes a different route, however, using cheerful splashes of color to create incredibly warm and welcoming scenes. This style is definitely evident in his latest creation, the Houses of Barqa:

80 |

Houses in Barqa

81 |

The buildings are elegantly designed and laid out, but the real star of this show is the use of color. If there’s been a better use of sand red, I haven’t seen it. The pastel palette blends so well with the more subtle tan/dark tan foundations and street. Taken together, it’s a gorgeous and eye-catching scene. Clever use of lighting also makes for some atmospheric and quite realistic looking images.

82 |

Houses in Barqa

83 |

If Jonas’ city leaves you wishing for more, definitely check out his amazing Streets of Barqa from several years ago or last year’s Aslanic Temple in Barqa.

84 | ]]>
85 | https://www.brothers-brick.com/2019/02/07/color-and-light-blend-beautifully-in-this-medieval-city/feed/ 86 | 1 87 | 171016 https://www.brothers-brick.com/2019/02/07/color-and-light-blend-beautifully-in-this-medieval-city/
88 | 89 | Stylish sci-fi racer approaches the starting line 90 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/cMVGbcZtImI/ 91 | https://www.brothers-brick.com/2019/02/07/stylish-sci-fi-racer-approaches-the-starting-line/#respond 92 | Thu, 07 Feb 2019 20:00:16 +0000 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | https://www.brothers-brick.com/?p=171371 101 | 102 | When it comes to building a great LEGO model, one thing that really shows off a builder’s skill is the ability to create something that can easily be mistaken for something other than plastic interlocking bricks. This sci-fi racing car by Vince_Toulouse is a perfect blend of smoothly curved details and unique parts, like the troll arms used for the main engine exhaust ports, or the mermaid tails housing the headlights. But by far, my favorite feature is the two-color striping throughout the car, which provides the perfect polish.

103 |

GR440 III

104 | ]]>
105 | https://www.brothers-brick.com/2019/02/07/stylish-sci-fi-racer-approaches-the-starting-line/feed/ 106 | 0 107 | 171371 https://www.brothers-brick.com/2019/02/07/stylish-sci-fi-racer-approaches-the-starting-line/
108 | 109 | Neo-classic space drill inspection 110 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/H8i3v-M_MvA/ 111 | https://www.brothers-brick.com/2019/02/07/neo-classic-space-drill-inspection/#comments 112 | Thu, 07 Feb 2019 14:00:51 +0000 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | https://www.brothers-brick.com/?p=171357 122 | 123 | Whilst the spacecraft of the classic LEGO space theme seem to grab the nostalgic limelight, for some of us the lunar rovers were the real stars. Maybe Andreas Lenander is trying to make this point, and if he is what better way than through this magnificent Neo-Classic Space Drilling Rover. It’s certainly got my classic space pulse racing. Although it sticks faithfully to the grey and blue colour scheme, its forms and shape speak to a more realistic post-NASA near future. There’s phenomenal part usage too, just look at the way the old rails form the drill casing, and the Jurassic World gyrosphere looks as if it were designed to be a moon buggy cab. To complete the scene Andrea signs off with a troop of new pink astronauts, from Benny’s Space Squad, scouring the variegated planet surface for its precious mineral reserves.

124 |

Syrsan - NCS drilling rover

125 | ]]>
126 | https://www.brothers-brick.com/2019/02/07/neo-classic-space-drill-inspection/feed/ 127 | 1 128 | 171357 https://www.brothers-brick.com/2019/02/07/neo-classic-space-drill-inspection/
129 | 130 | Gazooks! Here come Cahdok and Gahdok! 131 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/4K_F0AZMWkc/ 132 | https://www.brothers-brick.com/2019/02/07/gazooks-here-come-cahdok-and-gahdok/#respond 133 | Thu, 07 Feb 2019 08:00:46 +0000 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | https://www.brothers-brick.com/?p=171370 143 | 144 | Years after being discontiniued, Bionicle remains a strong and very much autonomous theme in LEGO fan builds. Unique pieces and almost complete freedom of angles set it apart from most other styles, but was it always so? Jayfa and Andrew Steele bring us back to 2002, a time when Bionicle was still searching for an identity and was for the most part a sub-theme to Technic. The glorious titan set Cahdok and Gahdok was a load of gears, rubber bands, liftarms and most importantly, play features. I do not think this re-imagining has much of those, but it does capture the spirit of the Bohrok queens.

145 |

Cahdok and Gahdok

146 |

147 |

Jayfa’s and Andrew’s Cahdok and Gahdok are very true to their source, but what I find the most impressive is how consistent the two builders made them. There is a bit of unique style in both of them while still giving the appearance like they came from the same universe. The flowing curves are a stark contrast to the official set, and yet they look incredibly natural. The purple and turquoise hoses under the neck and tail are reminiscent of the Competition theme which has a lot of parallels with early Bionicle play functions and this set in particular.

148 |

Cahdok and Gahdok

149 |

Gahdok has an interesting little detail that I am particularly in love with (besides all sorts of little mechanical bits on the legs) – the hips have red Bohrok shields taken directly from the official set, possibly as a nod to the original design.

150 |

Gahdok

151 |

It is interesting how Jayfa changed the colour scheme of Cahdok. It looks interesting and is probably beter overall, but one could understand that the original set was made with recombination in mind.

152 |

Cahdok

153 | ]]>
154 | https://www.brothers-brick.com/2019/02/07/gazooks-here-come-cahdok-and-gahdok/feed/ 155 | 0 156 | 171370 https://www.brothers-brick.com/2019/02/07/gazooks-here-come-cahdok-and-gahdok/
157 | 158 | The tree house of your LEGO dreams 159 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/PDtQw6oJwao/ 160 | https://www.brothers-brick.com/2019/02/06/the-tree-house-of-your-lego-dreams/#respond 161 | Thu, 07 Feb 2019 02:00:53 +0000 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | https://www.brothers-brick.com/?p=171328 170 | 171 | Once gain I have the pleasure of highlighting Alanboar Cheung‘s amazing work for TBB, previously sharing his butterfly mimicry and cloud car models. Never predictable, his newest build, a quirky dream treehouse, is inspired by The LEGO Movie 2.

172 |

LEGO MOVIE DREAM TREE HOUSE 樂高電影夢幻樹屋

173 |

Built for the movie’s unique cast of characters, it incorporates a rainbow, clouds, piano room, and even a Unikitty slide — although I’m little worried as to where you’d end up if you actually tried to ride it. Simply exploding with colourful charm and cute details, it’s one of those creations that is going to be just as much fun to play with as is to marvel at. It’s also another reason – as if I needed one –  to get excited about seeing the film, which comes out later this week.

174 | ]]>
175 | https://www.brothers-brick.com/2019/02/06/the-tree-house-of-your-lego-dreams/feed/ 176 | 0 177 | 171328 https://www.brothers-brick.com/2019/02/06/the-tree-house-of-your-lego-dreams/
178 | 179 | You know my methods Watson 180 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/jQqg41VX6KM/ 181 | https://www.brothers-brick.com/2019/02/06/you-know-my-methods-watson/#comments 182 | Wed, 06 Feb 2019 20:00:44 +0000 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | https://www.brothers-brick.com/?p=171365 191 | 192 | When the body of Sir Charles Murgatroyd is discovered in his library, the local Constabulary are immediately called for. Foul play is suspected, and an investigation begins. Despite their best efforts, the police remain baffled as to motive or culprit. Only one hope remains, to summon the consulting detective Sherlock Holmes and his friend Dr John Watson…

193 |

LEGO Sherlock Holmes and Watson

194 |

Since reading The Hound of the Baskervilles as a child, I’ve always been a huge fan of Arthur Conan Doyle’s Sherlock Holmes stories. I’ve often pondered the idea of building scenes from some of his most famous adventures, and this little model was something of a trial. The library-based murder depicted is not based on any particular story, but I’m quite pleased with how it turned out. The trickiest bit of the whole model was the window — it took my ages to get the curtain to look right, and to get the leaded windows to fill the space without gaps.

195 | ]]>
196 | https://www.brothers-brick.com/2019/02/06/you-know-my-methods-watson/feed/ 197 | 3 198 | 171365 https://www.brothers-brick.com/2019/02/06/you-know-my-methods-watson/
199 | 200 | The LEGO Movie 2 Collectible Minifigures 71023 Feel Guide [Review] 201 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/MqChhN0BHEA/ 202 | https://www.brothers-brick.com/2019/02/06/the-lego-movie-2-collectible-minifigures-71023-feel-guide-review/#comments 203 | Wed, 06 Feb 2019 14:00:32 +0000 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | https://www.brothers-brick.com/?p=171407 214 | 215 | The latest series of LEGO’s Collectible Minifigures theme based The LEGO Movie 2: The Second Part are now hitting stores. We’ve already brought you our full, in-depth review, so that means it’s now time for our Feel Guide to help you poke and prod your way to a full set of 20 characters. 71023 LEGO Minifigures – The LEGO Movie 2: The Second Part are available now in retail stores and online, for US $3.99 | CAN $4.99.

216 |

217 |

So let’s check out what makes these figures stand out from one another when all you’ve got is an opaque package and a crowd of onlookers in the store aisle.

218 |

219 |

The case

220 |

Like the majority of previous series, this set comes packed in cases of 60. Unlike previous series, they seem to be pretty well sorted within the case, but it’s not perfect. The cases are divided into three rows of 20 packs each. Many collectors have been reporting success with grabbing one full row and getting the full set of 20, or very close to it. We paid extra attention to this when sorting our case, and ours didn’t break down quite that neatly (nor did it break into precisely three full sets, having one figure mismatch). However, even with our case, grabbing a row would have netted you about 17 unique figures, so if you’re short on time or dexterity, this will be your best bet. One big caveat, of course: this only applies to new cases that haven’t already been rifled through by others.

221 |

222 |

This series also differs from previous ones in another way. Several of the characters are packaged with an inner plastic bag. This bag was found in six characters in our review, and the bag was present in all of the packs for that given character. However, we’ve talked with other collectors who found inner bags with other characters, or found these without bags. Ultimately, it seems a bit random (perhaps LEGO has more than factory or production line making this series). So although the inner bags crinkle loudly and might have served as a good indicator on which fig you’re handling, we can’t recommend this method as it doesn’t appear reliable enough. Thankfully, the presence of an inner bag doesn’t have much of an effect on the ability to feel the elements inside.

223 | 224 | 225 | 226 | 227 | 228 | 229 | 230 | 231 |

Download a PDF of this cheat sheet to use on your phone in the store when searching for minifigures. As always, we’ve developed this guide by experience, having started way back with Series 1. And of course, we’ve already sorted lots of The LEGO Movie 2 Collectible Minifigures by feel.

232 |

233 |

Let us know in the comments your own tips and tricks for finding figures! Do remember to also check out our Full Review of this LEGO Movie 2 minifigure series!

234 |

235 |

71023 The LEGO Movie 2 Collectible Minifigures are available now from the LEGO Shop Online (US $3.99 | CAN $4.99) and Amazon, as well as third-party sellers on Bricklink and eBay.

236 |

The LEGO Group sent The Brothers Brick a copy of this set for review. Providing TBB with products for review guarantees neither coverage nor positive reviews.

237 | ]]>
238 | https://www.brothers-brick.com/2019/02/06/the-lego-movie-2-collectible-minifigures-71023-feel-guide-review/feed/ 239 | 6 240 | 171407 https://www.brothers-brick.com/2019/02/06/the-lego-movie-2-collectible-minifigures-71023-feel-guide-review/
241 | 242 | Tropical paradise is a plea for warmer weather 243 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/ddNmm5bsQyI/ 244 | https://www.brothers-brick.com/2019/02/06/tropical-paradise-is-a-plea-for-warmer-weather/#comments 245 | Wed, 06 Feb 2019 08:00:39 +0000 246 | 247 | 248 | 249 | 250 | 251 | 252 | 253 | 254 | https://www.brothers-brick.com/?p=171333 255 | 256 | Those of us in the northwestern hemisphere have had a tough time lately, what with the polar vortex, record-shattering temperatures (as low as -63 degrees Celsius at my mom’s house in Winnipeg, Canada) and unrelenting snow and ice. Even here in southwestern Arkansas, where winter generally just means anything below 10 degrees Celsius, we were racing to buy wintry garments normally only seen in movies about Alaska. On the flip side, the nasty weather meant more time shamelessly spent in the LEGO room. I built this tropical scene while daydreaming about places where I don’t have to leave faucets running for fear of water pipes bursting inside my home.

257 |

DSC_0162-6

258 |

This was a simple but fun build to throw together. There are no crazy techniques or excessively nice parts usages (NPU) to highlight here. But a dash of color, proper composition and a bit of photography know-how can just about always turn a bland build into something that really catches the eye. If you like the trees, they are easily recreated using the 4mm pneumatic hose and cylinder bricks. They can be twisted around each other and held in that position with the leaf elements. Simple and easy jungle tree!

259 | ]]>
260 | https://www.brothers-brick.com/2019/02/06/tropical-paradise-is-a-plea-for-warmer-weather/feed/ 261 | 1 262 | 171333 https://www.brothers-brick.com/2019/02/06/tropical-paradise-is-a-plea-for-warmer-weather/
263 | 264 | Long have I served as the guardian spirit 265 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/Kr0PmocCLZk/ 266 | https://www.brothers-brick.com/2019/02/05/long-have-i-served-as-the-guardian-spirit/#respond 267 | Wed, 06 Feb 2019 02:00:04 +0000 268 | 269 | 270 | 271 | 272 | 273 | 274 | 275 | 276 | 277 | https://www.brothers-brick.com/?p=170980 278 | 279 | Guardian of the Hyrule Forest. Giver of Quests. Insides infested with Skulltula Spiders.
280 | The Great Deku Tree from Nintendo classic Zelda: The Ocarina of Time is given the LEGO treatment by Julius von Brunk. The microscale model is nicely-done, perfectly capturing the tree’s sleepy-looking face. But it’s the amazing photography which sets this creation apart — Julius has combined three images into one to create this stunning look, which manages to make a small model appear much larger. I love how the low angle and out-of-focus foreground foliage gives the tree such physical presence. Excellent stuff.

281 |

LEGO Zelda Great Deku Tree

282 | ]]>
283 | https://www.brothers-brick.com/2019/02/05/long-have-i-served-as-the-guardian-spirit/feed/ 284 | 0 285 | 170980 https://www.brothers-brick.com/2019/02/05/long-have-i-served-as-the-guardian-spirit/
286 | 287 | LEGO Millennium Falcon hides in plain sight 288 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/9myV6FZP3fw/ 289 | https://www.brothers-brick.com/2019/02/05/lego-millennium-falcon-hides-in-plain-sight/#respond 290 | Tue, 05 Feb 2019 20:00:40 +0000 291 | 292 | 293 | 294 | 295 | 296 | 297 | 298 | https://www.brothers-brick.com/?p=170984 299 | 300 | It’s one of the coolest moments in The Empire Strikes Back, when Han Solo evades the Imperials by hiding his ship in plain sight, latched on to the hull of a Star Destroyer. Here this memorable scene is recreated in LEGO bricks by Didier Burtin. The model is immediately recognisable — indeed, at first glance it’s practically indistinguishable from a still from the movie. The Star Destroyer’s surface is impressively detailed, packed with a generous level of detail that breaks up all that grey, and the lighting for the photo is spot-on, managing to capture the stark contrast and drama of the original scene.

301 |

Hidden Millenium Falcon

302 | ]]>
303 | https://www.brothers-brick.com/2019/02/05/lego-millennium-falcon-hides-in-plain-sight/feed/ 304 | 0 305 | 170984 https://www.brothers-brick.com/2019/02/05/lego-millennium-falcon-hides-in-plain-sight/
306 | 307 | The city of Cyrene falls to the Pierian Empire 308 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/HgSjnqROoxQ/ 309 | https://www.brothers-brick.com/2019/02/05/the-city-of-cyrene-falls-to-the-pierian-empire/#respond 310 | Tue, 05 Feb 2019 14:00:18 +0000 311 | 312 | 313 | 314 | 315 | 316 | 317 | 318 | 319 | 320 | https://www.brothers-brick.com/?p=171315 321 | 322 | Despite the inclusion of Classical or Greco-Roman characters in several waves of Collectible Minifigures, the ancient world just isn’t as popular with LEGO Castle builders as the big gray castles of the medieval era. As a result, it’s always refreshing to see great LEGO models from that earlier era. Talented TBB alum Mark Erickson has created a fictional battle between rivals the Pierian Empire and the great city of Tylis. Mark’s diorama is full of fantastic architectural detail — I particularly love the contrast between the tan city walls and the shining white temple with its gold details and green roof.

323 |

The Fall of Cyrene

324 |

325 |

While your eye is certainly drawn past the walls to the beautiful temple, the walls themselves are worth a closer look, with great brick-work in varied, natural colors between what I’m guessing are sandstone columns. This view also shows off the siege tower Mark designed for his Pierian soldiers to surmount the Cyrenian walls.

326 |

The Fall of Cyrene

327 |

Far too many builders neglect minifigures once they’ve completed their cityscape or spaceship or castle, but minifigures bring a LEGO model to life, and Mark’s scene has minifigure action galore. His minifigures sport custom armor and weapons from BrickWarriors, helping to distinguish the blue-and-silver and red-and-gold factions.

328 |

The Fall of Cyrene

329 |

Mark says that he missed entering the annual Colossal Castle Contest this past December because he was busy working on this diorama. With off-angle walls throughout, excellent landscaping, and an engaging story told through minifig action, there’s lots to love in this excellent LEGO creation, and very much worth the wait.

330 |

The Fall of Cyrene

331 | ]]>
332 | https://www.brothers-brick.com/2019/02/05/the-city-of-cyrene-falls-to-the-pierian-empire/feed/ 333 | 0 334 | 171315 https://www.brothers-brick.com/2019/02/05/the-city-of-cyrene-falls-to-the-pierian-empire/
335 | 336 | 1% inspiration, 99% perspiration 337 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/XqvuZoc8ONA/ 338 | https://www.brothers-brick.com/2019/02/05/1-inspiration-99-perspiration/#comments 339 | Tue, 05 Feb 2019 08:00:43 +0000 340 | 341 | 342 | 343 | 344 | 345 | 346 | 347 | https://www.brothers-brick.com/?p=170989 348 | 349 | That was Thomas Edison’s recipe for innovation. But he failed to mention the importance of keeping things simple. When it comes to LEGO creations, sometimes the simplest models are the most impressive, and this wonderful LEGO lightbulb by Josephine Monterosso is a great example. It may be comprised of only seven pieces, but this economy of parts only makes it all the more impressive. The transparent minifigure head and clear space helmet make for the perfect recreation of retro lightbulb curves, and the short length of silver ribbed hose is a nice way to evoke a screw thread. Maybe this LEGO lightbulb will give other builders ideas too!

350 |

Light Bulb

351 | ]]>
352 | https://www.brothers-brick.com/2019/02/05/1-inspiration-99-perspiration/feed/ 353 | 3 354 | 170989 https://www.brothers-brick.com/2019/02/05/1-inspiration-99-perspiration/
355 | 356 | Block-rocking beats from this LEGO Walkman 357 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/lzIx-2Y0fmk/ 358 | https://www.brothers-brick.com/2019/02/04/block-rocking-beats-from-this-lego-walkman/#respond 359 | Tue, 05 Feb 2019 02:00:47 +0000 360 | 361 | 362 | 363 | 364 | 365 | 366 | 367 | 368 | https://www.brothers-brick.com/?p=170977 369 | 370 | When Ralf Langer put together his excellent LEGO headphones and tape cassette, all that was missing was something to provide the tunes. Now he’s filled the gap with a brick rendition of the innovative 80s hardware that reinvented how we listened to music — the Sony Walkman. The colour scheme is a perfect match for the 1979 original, and the details down the side are simply spot-on — don’t miss the use of a silver ingot piece and grille bricks to recreate the volume slider, the offsets so the buttons stand out from the casing, and the nice deployment of the “back-to-back grille tile” technique to make those tiny square holes. I also love that silver stripe separating the blue from the grey — excellent attention to detail.

371 |

It's a Sony - no, a Lego!

372 | ]]>
373 | https://www.brothers-brick.com/2019/02/04/block-rocking-beats-from-this-lego-walkman/feed/ 374 | 0 375 | 170977 https://www.brothers-brick.com/2019/02/04/block-rocking-beats-from-this-lego-walkman/
376 |
377 |
378 | -------------------------------------------------------------------------------- /samples/stackoverflow: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Why can't `cargo build` compile structopt-derive in VS Code? - Stack Overflow 5 | 6 | 7 | most recent 30 from stackoverflow.com 8 | 2019-02-09T20:44:41Z 9 | https://stackoverflow.com/feeds/question/51744103 10 | http://www.creativecommons.org/licenses/by-sa/3.0/rdf 11 | 12 | https://stackoverflow.com/q/51744103 13 | 2 14 | Why can't `cargo build` compile structopt-derive in VS Code? 15 | 16 | 17 | 18 | 19 | Riduidel 20 | https://stackoverflow.com/users/15619 21 | 22 | 23 | 2018-08-08T10:13:06Z 24 | 2018-08-08T13:02:35Z 25 | 26 | 27 | 28 | <p>I'm trying to write a small CLI application using Rust and the excellent structopt crate.</p> 29 | 30 | <p>When I'm using Notepad++ (to write code) and Conemu (to run Cargo commands), everything works fine.</p> 31 | 32 | <p>However, when I'm using VS Code with <a href="https://github.com/rust-lang-nursery/rls-vscode" rel="nofollow noreferrer">Rust plugin</a> (or Eclipse Corrosion), <code>cargo build</code> command fails with this error</p> 33 | 34 | <pre class="lang-none prettyprint-override"><code> Compiling atty v0.2.10 35 | Compiling clap v2.31.2 36 | Compiling structopt-derive v0.2.10 37 | error: linking with `C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\Tools\MSVC\14.14.26428\bin\HostX64\x64\link.exe` failed: exit code: 1104 38 | | 39 | = note: "C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools\\VC\\Tools\\MSVC\\14.14.26428\\bin\\HostX64\\x64\\link.exe" "/NOLOGO" "/NXCOMPAT" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive0.rcgu.o" 
"C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive1.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive10.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive11.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive12.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive13.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive14.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive15.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive2.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive3.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive4.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive5.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive6.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive7.rcgu.o" 
"C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive8.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive9.rcgu.o" "/OUT:C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.dll" "/DEF:C:\\Users\\NICOLA~1\\AppData\\Local\\Temp\\rustc.NMAPUPGalI4H\\lib.def" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.crate.metadata.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.crate.allocator.rcgu.o" "/OPT:REF,NOICF" "/DEBUG" "/NATVIS:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\etc\\intrinsic.natvis" "/NATVIS:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\etc\\liballoc.natvis" "/NATVIS:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\etc\\libcore.natvis" "/LIBPATH:C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libsyn-e2bf8da738ad52ef.rlib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libquote-90431d93ebae45fd.rlib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libproc_macro2-f91721dd8e02bb17.rlib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libunicode_xid-4611d062b1d773c0.rlib" 
"/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "proc_macro-1f431d761952eacf.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "syntax-c4a428491fc49b8f.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "rustc_errors-5b01c9a7974f0222.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "syntax_pos-09170bc016e0b11a.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "rustc_data_structures-f974a5ad0e93670e.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "serialize-2eb0aeb35010f869.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "rustc_cratesio_shim-2e9a42f968785601.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "std-81327c94ecbc69b1.dll.lib" "C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib\\libcompiler_builtins-e8d853735a158029.rlib" "opengl32.lib" "kernel32.lib" "setupapi.lib" "msimg32.lib" "credui.lib" "winspool.lib" "user32.lib" "gdi32.lib" "secur32.lib" "dbghelp.lib" "advapi32.lib" "advapi32.lib" "ws2_32.lib" "userenv.lib" "shell32.lib" "msvcrt.lib" "/DLL" "/IMPLIB:C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.dll.lib" 40 | = note: LINK : fatal error LNK1104: impossible d'ouvrir le fichier 
'C:\Users\nicolas-delsaux\Documents\open-source\rrss2imap\target\debug\deps\structopt_derive-406f571196e63046.dll' 41 | 42 | 43 | error: aborting due to previous error 44 | 45 | error: Could not compile `structopt-derive`. 46 | warning: build failed, waiting for other jobs to finish... 47 | error: build failed 48 | </code></pre> 49 | 50 | <p>It seems like some process has locked the output file, but LockHunter (which I use to detect that kind of locks) doesn't detect any...</p> 51 | 52 | <p>What is the problem? What can I do - beside coding using Notepad++ - to be able to run cargo commands in VS Code?</p> 53 | 54 | 55 | 56 | 57 | https://stackoverflow.com/questions/51744103/-/51744721#51744721 58 | 2 59 | Answer by Riduidel for Why can't `cargo build` compile structopt-derive in VS Code? 60 | 61 | Riduidel 62 | https://stackoverflow.com/users/15619 63 | 64 | 65 | 2018-08-08T10:45:07Z 66 | 2018-08-08T10:45:07Z 67 | <p>Seems like it's a bug in RLS : <a href="https://github.com/rust-lang-nursery/rls/issues/802" rel="nofollow noreferrer">Windows: RLS keeping derive plugin DLLs opened prevents <code>cargo build</code> from working #802</a></p> 68 | 69 | <blockquote> 70 | <p>Whenever the RLS is running for a crate, I can't do cargo build for that crate. It fails with errors like this:</p> 71 | 72 | <p>[...]</p> 73 | 74 | <p>Looking in Process Explorer, RLS has the derive plugin DLLs loaded. I assume this is what's causing cargo to fail, since it can't write to those files while they're loaded. Similarly, cargo clean fails:</p> 75 | 76 | <p>[...]</p> 77 | 78 | <p>If I close VSCode (and thus RLS), building with cargo build works fine again.</p> 79 | 80 | <p>I assume this is Windows-specific due to its file exclusivity behavior. I think this started happening with a recent nightly (3/28?). 
I suppose this could've been caused by some change in cargo or rustc causing it to write to dlls which were already built or something.</p> 81 | </blockquote> 82 | 83 | <p>So solution should be quite simple : update RLS to its latest version and see the bug being fixed !</p> 84 | 85 | <p>And to update RLS, it's simply a matter of <code>rustup update</code></p> 86 | 87 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use super::settings::*; 2 | 3 | /// This structure defines the feed-level config. 4 | /// All elements here may be configured twice : once at feed level, and once at global level. 5 | /// Obviously, all elements which are not defined at feed level use global configuration 6 | #[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] 7 | pub struct Config { 8 | /// When set, contains the email address used 9 | #[serde(skip_serializing_if = "Option::is_none")] 10 | pub email: Option, 11 | /// When set, contains the folder in which entries for feed will be written 12 | #[serde(skip_serializing_if = "Option::is_none")] 13 | pub folder: Option, 14 | /// When defined, this from field will be used instead of trying to construct it from feed title 15 | #[serde(skip_serializing_if = "Option::is_none")] 16 | pub from: Option, 17 | /// When set to true, images will be inlined 18 | #[serde( 19 | skip_serializing_if = "Settings::is_false", 20 | default = "Settings::default_false" 21 | )] 22 | pub inline_image_as_data: bool, 23 | } 24 | 25 | impl Config { 26 | /// Creates a new instance with all fields set to default "falsy" values : options are set to none and booleans to false 27 | pub fn new() -> Config { 28 | Config { 29 | email: None, 30 | folder: None, 31 | inline_image_as_data: false, 32 | from: None, 33 | } 34 | } 35 | 36 | /// Creates a string view of config. 
37 | /// More precisely, outputs the email address and folder in which entries are to be written 38 | /// A default config is given for options set to None. 39 | pub fn to_string(self, default: &Config) -> String { 40 | format!( 41 | "(to: {}) {}", 42 | self.email.unwrap_or_else(|| format!( 43 | "{} (default)", 44 | default.clone().email.unwrap_or_else(|| "".to_owned()) 45 | )), 46 | self.folder.unwrap_or_else(|| format!( 47 | "{} (default)", 48 | default.clone().folder.unwrap_or_else(|| "".to_owned()) 49 | )) 50 | ) 51 | } 52 | 53 | /// Used by serde to skip serialization of default config for feeds 54 | /// This method check if config is the default one (consisting only into None options) 55 | pub fn is_none(config: &Config) -> bool { 56 | config.email.is_none() 57 | && config.folder.is_none() 58 | && config.from.is_none() 59 | && !config.inline_image_as_data 60 | } 61 | 62 | /// Clear all content from this config excepted email address 63 | pub fn clear(&mut self) { 64 | self.folder = None; 65 | } 66 | 67 | /// Get the email value for that feed, be it defined locally or from the default config 68 | pub fn get_email(&self, default: &Config) -> String { 69 | self.clone() 70 | .email 71 | .unwrap_or_else(|| default.clone().email.unwrap_or_else(|| "".to_owned())) 72 | } 73 | 74 | /// Get the folder value for that feed, be it defined locally or from the default config 75 | pub fn get_folder(&self, default: &Config) -> String { 76 | self.clone() 77 | .folder 78 | .unwrap_or_else(|| default.clone().folder.unwrap_or_else(|| "".to_owned())) 79 | } 80 | 81 | /// Compute an inline flag by resolving the two flags with this struct inline images status 82 | pub fn inline(&self, inline:bool, do_not_inline:bool)->bool { 83 | if self.inline_image_as_data { 84 | !do_not_inline 85 | } else { 86 | inline 87 | } 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /src/export.rs: 
--------------------------------------------------------------------------------
/src/export.rs:
--------------------------------------------------------------------------------
use std::path::PathBuf;

use std::fs;

use std::collections::HashMap;

use super::feed::Feed;
use super::store::Store;

use treexml::*;

/// Export the whole store as an OPML file at the given path.
///
/// Feeds are first grouped by destination folder, then serialized as one
/// `outline` element per folder, each containing one `outline` per feed.
pub fn export(to_file: &PathBuf, to_store: &Store) {
    // First group feeds per storage folder
    let grouped = group_feeds(to_store);
    // Then write this map of lists
    write(to_file, grouped);
}

/// Group the store's feeds by the IMAP folder they are written to.
///
/// NOTE(review): the generic parameters of this signature were destroyed
/// during text extraction; `HashMap<String, Vec<Feed>>` is reconstructed from
/// the way the map is consumed in `write` below — confirm against upstream.
fn group_feeds(to_store: &Store) -> HashMap<String, Vec<Feed>> {
    to_store.feeds.iter().fold(HashMap::new(), |mut map, feed| {
        let feed = feed.clone();
        let folder = feed.config.get_folder(&to_store.settings.config);
        // entry() replaces the original contains_key/insert/append dance;
        // insert(0, _) preserves the original ordering (each feed was
        // prepended in front of the previously accumulated ones).
        map.entry(folder).or_insert_with(Vec::new).insert(0, feed);
        // Return value of closure (which is *not* a return statement ;-)
        map
    })
}

/// Write the folder -> feeds map as an OPML document.
///
/// Panics when the target file can't be written (same behavior as before).
fn write(to_file: &PathBuf, to_store: HashMap<String, Vec<Feed>>) {
    // Prepare the document by setting all boilerplate elements (root, head, body, ...)
    let mut root = Element::new("opml");
    root.attributes
        .insert("version".to_owned(), "1.0".to_owned());
    let mut header = Element::new("head");
    let mut title = Element::new("title");
    title.text = Some("rrss2imap OPML Export".to_owned());
    header.children.push(title);
    root.children.push(header);
    let mut body = Element::new("body");
    // Now fill body with outline elements generated from feeds
    for (folder, elements) in to_store {
        let mut folder_element = Element::new("outline");
        folder_element
            .attributes
            .insert("text".to_owned(), folder.clone());
        folder_element
            .attributes
            .insert("title".to_owned(), folder.clone());
        for feed in elements {
            let mut outline = Element::new("outline");
            outline
                .attributes
                .insert("type".to_owned(), "rss".to_owned());
            outline
                .attributes
                .insert("text".to_owned(), feed.url.clone());
            outline
                .attributes
                .insert("xmlUrl".to_owned(), feed.url.clone());
            folder_element.children.push(outline);
        }
        body.children.push(folder_element);
    }
    // Don't forget to add body after, otherwise we enter into the dangerous realm of borrowed values
    root.children.push(body);
    let mut document = Document::new();
    document.root = Some(root);
    fs::write(to_file, format!("{}", document))
        .unwrap_or_else(|_| panic!("Unable to write file {:?}", to_file));
}
--------------------------------------------------------------------------------
/src/feed.rs:
--------------------------------------------------------------------------------
use chrono::NaiveDateTime;
use tests_bin::unit_tests;

use super::config::*;

use super::feed_reader::*;
use super::settings::*;
use super::syndication;
use super::message::*;

/// A feed to process: its url, per-feed configuration, and read state
/// (last processed date and last processed message id).
#[unit_tests("feed.rs")]
#[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
pub struct Feed {
    /// Contains url of feed
    pub url: String,
    /// Contains specific configuration for feed
    #[serde(skip_serializing_if = "Config::is_none", default = "Config::new")]
    pub config: Config,
    /// Last time the feed was read
    #[serde(default = "Feed::at_epoch")]
    pub last_updated: NaiveDateTime,
    /// Last message stored in IMAP, allows to correctly process feeds even when no date is provided
    /// which, mind you, is totally possible according to RSS specification
    /// NOTE(review): generic parameter reconstructed as `Option<String>` from
    /// its use in `update_feed` (`Some(message.id.clone())`).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub last_message: Option<String>,
}

impl Feed {
    /// Creates a new naivedatetime with a default value (which is, to my mind) a sensible default for computers
    pub fn at_epoch() -> NaiveDateTime {
        NaiveDateTime::from_timestamp_opt(0, 0).unwrap()
    }

    /// Convert the parameters vec into a valid feed (if possible).
    ///
    /// Expected (positional) parameters are url, then optionally an email
    /// address (detected by the presence of '@') and/or a folder name.
    /// Panics when no url is given at all.
    pub fn from_vec(parameters: Vec<String>) -> Feed {
        let mut consumed = parameters;
        let url: String = consumed
            .pop()
            .expect("You must at least define an url to add.");
        let mut email: Option<String> = None;
        let mut folder: Option<String> = None;
        // If there is a second parameter, it can be either email or folder
        if !consumed.is_empty() {
            let second = consumed.pop().unwrap();
            // If second parameter contains an @, I suppose it is an email address
            if second.contains('@') {
                debug!(
                    "Second add parameter {} is considered an email address",
                    second
                );
                email = Some(second)
            } else {
                warn!("Second add parameter {} is NOT considered an email address, but a folder. NO MORE ARGUMENTS WILL BE PROCESSED", second);
                folder = Some(second)
            }
        }
        // If there is a third parameter, it is the folder.
        // But if folder was already defined, there is an error !
        if !consumed.is_empty() && folder.is_none() {
            folder = Some(consumed.pop().unwrap());
        }
        Feed {
            url,
            config: Config {
                email,
                folder,
                from: None,
                inline_image_as_data: false,
            },
            last_updated: Feed::at_epoch(),
            last_message: None,
        }
    }

    /// Build a feed from the named command-line options of `rrss2imap add`.
    /// Panics when `url` is `None`.
    pub fn from_all(url: Option<String>, email: Option<String>, destination: Option<String>, inline: bool) -> Feed {
        Feed {
            url: url.unwrap(),
            config: Config {
                email,
                folder: destination,
                from: None,
                inline_image_as_data: inline,
            },
            last_updated: Feed::at_epoch(),
            last_message: None,
        }
    }

    /// Render the feed (url + resolved config) for `rrss2imap list`.
    pub fn to_string(&self, config: &Config) -> String {
        format!("{} {}", self.url, self.config.clone().to_string(config))
    }

    /**
     * Read the feed and produce the list of messages to write later.
     * Network or decoding errors are logged and yield an empty list.
     */
    pub fn read(&self, index: usize, count: &usize) -> Vec<Message> {
        info!("Reading feed {}/{} from {}", index + 1, count, self.url);
        match ureq::get(&self.url).call() {
            Ok(response) => match response.into_string() {
                Ok(text) => return self.read_response_text(text),
                Err(e) => error!("There is no text at {} due to error {}", &self.url, e),
            },
            Err(e) => error!("Unable to get {} due to {}.\nTODO Add better http response analysis !", &self.url, e),
        }
        vec![]
    }

    /// Parse the downloaded text as Atom or RSS and extract its messages.
    /// An unparseable document is logged and yields an empty list.
    pub fn read_response_text(&self, text: String) -> Vec<Message> {
        match text.parse::<syndication::Feed>() {
            Ok(parsed) => {
                return match parsed {
                    syndication::Feed::Atom(atom_feed) => AtomReader {}.read(self, &atom_feed),
                    syndication::Feed::RSS(rss_feed) => RssReader {}.read(self, &rss_feed),
                }
            }
            // typo fix in the logged message: "ar" -> "at"
            Err(e) => error!("Content at {} is neither Atom, nor RSS {}.\nTODO check real content type to help user.", &self.url, e),
        }
        vec![]
    }

    /// Produce a copy of the message with its content processed
    /// (image inlining, etc.) according to feed and global settings.
    pub fn process_message(&self, settings: &Settings, message: &Message) -> Message {
        Message {
            authors: message.authors.clone(),
            content: Message::get_processed_content(&message.content, self, settings).unwrap(),
            id: message.id.clone(),
            last_date: message.last_date,
            links: message.links.clone(),
            title: message.title.clone(),
        }
    }

    /// Find in the given input feed the new messages
    /// A message is considered new if it has a date which is nearer than feed last processed date
    /// or (because RSS and Atom feeds may not have dates) if its id is not yet the id of the last
    /// processed feed
    pub fn find_new_messages(&self, sorted_messages: &[Message]) -> (usize, usize, bool) {
        let head: usize = 0;
        let mut tail: usize = 0;
        let mut found = false;
        // Now do the filter
        // This part is not so easy.
        // we will first iterate over the various items and for each, check that
        // 1 - the message id is not the last read message one
        // 2 - if messages have dates, the message date is more recent than the last one
        for (position, message) in sorted_messages.iter().enumerate() {
            if !found {
                if let Some(id) = &self.last_message {
                    if id == &message.id {
                        tail = position;
                        found = true;
                        break;
                    }
                }
                // NOTE(review): the end of this loop was destroyed during text
                // extraction (the "<" of the date comparison swallowed the
                // following statements). Reconstructed as: a message older
                // than the feed's last processed date marks the boundary of
                // new messages. TODO confirm against the upstream repository.
                if message.last_date < self.last_updated {
                    tail = position;
                    found = true;
                }
            }
        }
        (head, tail, found)
    }

    /// Write the new messages of this feed to IMAP and return an updated copy
    /// of the feed (new `last_updated` / `last_message`), unless
    /// `settings.do_not_save` is set.
    ///
    /// NOTE(review): this signature was partially destroyed during text
    /// extraction; reconstructed as taking the extracted `Vec<Message>`.
    pub fn update_feed(&self, settings: &Settings, extracted: Vec<Message>) -> Feed {
        let sorted_messages = extracted;
        let (head, tail, found) = self.find_new_messages(sorted_messages.as_slice());
        let filtered_messages: &[Message] = if found {
            &sorted_messages[head..tail]
        } else {
            sorted_messages.as_slice()
        };

        // And write the messages into IMAP and the feed into JSON
        let written_messages: Vec<Message> = filtered_messages.iter()
            .map(|message| self.process_message(settings, message))
            .inspect(|e| if !settings.do_not_save { e.write_to_imap(self, settings) })
            .collect();
        let mut last_message: Option<&Message> = written_messages.iter()
            // ok, there is a small problem here: if at least two elements have the same value - which is the case when feed
            // elements have no dates - the LAST one is used (which is **not** what we want)
            // see https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.max_by_key
            .max_by_key(|e| e.last_date.timestamp());
        // So, to overcome last problem, if first filtered message has same date than last_message, we replace last by first
        // As RSS feeds are supposed to put the latest emitted message in first position
        if let Some(last) = last_message {
            if filtered_messages.len() > 1 && filtered_messages[0].last_date == last.last_date {
                last_message = Some(&filtered_messages[0]);
            }
        }

        let mut returned = self.clone();
        if settings.do_not_save {
            warn!("do_not_save is set. As a consequence, feed won't be updated");
        } else if let Some(message) = last_message {
            returned.last_updated = message.last_date;
            returned.last_message = Some(message.id.clone());
        }
        returned
    }
}
--------------------------------------------------------------------------------
/src/feed_errors.rs:
--------------------------------------------------------------------------------
use custom_error::custom_error;

custom_error! {
    pub UnparseableFeed
    DateIsNotRFC2822{value:String} = "Date {value} is not RFC-2822 compliant",
    DateIsNotRFC3339{value:String} = "Date {value} is not RFC-3339 compliant",
    DateIsNeitherRFC2822NorRFC3339{value:String} = "Date {value} is neither RFC-2822 nor RFC-3339 compliant",
    ChronoCantParse{source: chrono::ParseError} = "chrono can't parse date",
    NoDateFound = "absolutly no date field was found in feed",
    CantExtractImages{source: super::message::UnprocessableMessage} = "Seems like it was not possible to read message contained images"
}
-------------------------------------------------------------------------------- /src/feed_reader.rs: -------------------------------------------------------------------------------- 1 | use chrono::{DateTime, Utc, FixedOffset, NaiveDateTime}; 2 | 3 | use super::feed_errors::*; 4 | use super::message::*; 5 | use atom_syndication::Entry as AtomEntry; 6 | use atom_syndication::Feed as AtomFeed; 7 | use rss::Channel as RssChannel; 8 | use rss::Item as RssItem; 9 | use url::Url; 10 | 11 | use super::feed::*; 12 | use super::feed_utils::*; 13 | 14 | /// The reader trait allow reading data from a web source. 15 | /// It is supposed to be derived for Rss and Atom, but it's only a try currently ... 16 | pub trait Reader { 17 | fn extract(&self, entry:&EntryType, source:&FeedType) -> Result; 18 | fn read_feed_date(&self, source:&FeedType)->NaiveDateTime; 19 | 20 | fn extract_messages(&self, source:&FeedType)->Vec>; 21 | 22 | fn read(&self, feed:&Feed, source:&FeedType)->Vec { 23 | debug!("reading feed {}", &feed.url); 24 | let feed_date = self.read_feed_date(source); 25 | info!( 26 | "Feed date is {} while previous read date is {}", 27 | feed_date, feed.last_updated 28 | ); 29 | let extracted:Vec> = self.extract_messages(source); 30 | 31 | let messages:Result, UnparseableFeed> = extracted.into_iter().collect(); 32 | messages.unwrap_or(vec![]) 33 | } 34 | } 35 | 36 | pub struct AtomReader {} 37 | 38 | impl AtomReader { 39 | fn extract_authors_from_atom(entry: &AtomEntry, feed: &AtomFeed) -> Vec<(String, String)> { 40 | let domain = AtomReader::find_atom_domain(feed); 41 | // This is where we also transform author names into urls in order 42 | // to have valid email addresses everywhere 43 | let mut message_authors: Vec = entry 44 | .authors() 45 | .iter() 46 | .map(|a| a.name().to_owned()) 47 | .collect(); 48 | if message_authors.is_empty() { 49 | message_authors = vec![feed.title().to_owned().to_string()] 50 | } 51 | sanitize_message_authors(message_authors, domain) 52 | } 
53 | 54 | fn find_atom_domain(feed: &AtomFeed) -> String { 55 | return feed 56 | .links() 57 | .iter() 58 | .filter(|link| link.rel() == "self" || link.rel() == "alternate").find(|link| !link.href().is_empty()) 59 | // Get the link 60 | .map(|link| link.href()) 61 | // Transform it into an url 62 | .map(|href| Url::parse(href).unwrap()) 63 | // then get host 64 | .map(|url| url.host_str().unwrap().to_string()) 65 | // and return value 66 | .unwrap_or("todo.find.domain.rss".to_string()); 67 | } 68 | } 69 | 70 | impl Reader for AtomReader { 71 | fn extract(&self, entry: &AtomEntry, source: &AtomFeed) -> Result { 72 | info!("Reading atom entry {} from {:?}", entry.id(), entry.links()); 73 | let authors = AtomReader::extract_authors_from_atom(entry, source); 74 | let last_date = entry 75 | .updated() 76 | .naive_utc(); 77 | let content = match entry.content() { 78 | Some(content) => content.value().unwrap(), 79 | None => match entry.summary() { 80 | Some(text)=> text.as_str(), 81 | None=>"" 82 | } 83 | } 84 | .to_owned(); 85 | let message = Message { 86 | authors, 87 | content, 88 | id: entry.id().to_owned(), 89 | last_date, 90 | links: entry.links().iter().map(|l| l.href().to_owned()).collect(), 91 | title: entry.title().as_str().to_string() 92 | }; 93 | Ok(message) 94 | } 95 | 96 | fn read_feed_date(&self, source:&AtomFeed)->NaiveDateTime { 97 | source.updated().naive_utc() 98 | } 99 | 100 | fn extract_messages(&self, source:&AtomFeed)->Vec> { 101 | source.entries() 102 | .iter() 103 | .map(|e| self.extract(e, source)) 104 | .collect() 105 | } 106 | } 107 | 108 | pub struct RssReader {} 109 | 110 | impl RssReader { 111 | fn extract_authors_from_rss(entry: &RssItem, feed: &RssChannel) -> Vec<(String, String)> { 112 | let domain = RssReader::find_rss_domain(feed); 113 | // This is where we also transform author names into urls in order 114 | // to have valid email addresses everywhere 115 | let message_authors: Vec; 116 | match entry.author() { 117 | Some(l) => 
message_authors = vec![l.to_owned()], 118 | _ => message_authors = vec![feed.title().to_owned()], 119 | } 120 | sanitize_message_authors(message_authors, domain) 121 | } 122 | fn find_rss_domain(feed: &RssChannel) -> String { 123 | return Some(feed.link()) 124 | .map(|href| Url::parse(href).unwrap()) 125 | // then get host 126 | .map(|url| url.host_str().unwrap().to_string()) 127 | // and return value 128 | .unwrap_or("todo.find.domain.atom".to_string()); 129 | } 130 | 131 | fn try_hard_to_parse(date:String) -> Result, UnparseableFeed> { 132 | let parsed = rfc822_sanitizer::parse_from_rfc2822_with_fallback(&date); 133 | if parsed.is_ok() { 134 | Ok(parsed?) 135 | } else { 136 | let retry = DateTime::parse_from_rfc3339(&date); 137 | if retry.is_ok() { 138 | Ok(retry?) 139 | } else { 140 | Err(UnparseableFeed::DateIsNeitherRFC2822NorRFC3339 {value:date}) 141 | } 142 | } 143 | } 144 | 145 | fn extract_date_from_rss(entry: &RssItem, feed: &RssChannel) -> Result, UnparseableFeed> { 146 | if entry.pub_date().is_some() { 147 | let mut pub_date = entry.pub_date().unwrap().to_owned(); 148 | pub_date = pub_date.replace("UTC", "UT"); 149 | RssReader::try_hard_to_parse(pub_date) 150 | } else if entry.dublin_core_ext().is_some() 151 | && !entry.dublin_core_ext().unwrap().dates().is_empty() 152 | { 153 | let pub_date = &entry.dublin_core_ext().unwrap().dates()[0]; 154 | Ok(DateTime::parse_from_rfc3339(pub_date)?) 155 | } else { 156 | debug!("feed item {:?} date can't be parsed, as it doesn't have neither pub_date nor dc:pub_date. 
We will replace it with feed date if possible", 157 | &entry.link() 158 | ); 159 | if feed.pub_date().is_some() { 160 | let pub_date = feed.pub_date().unwrap().to_owned(); 161 | RssReader::try_hard_to_parse(pub_date) 162 | } else if feed.last_build_date().is_some() { 163 | let last_pub_date = feed.last_build_date().unwrap().to_owned(); 164 | RssReader::try_hard_to_parse(last_pub_date) 165 | } else { 166 | Ok(DateTime::::from_utc( 167 | Feed::at_epoch(), 168 | FixedOffset::east_opt(0).unwrap())) 169 | } 170 | } 171 | } 172 | } 173 | 174 | impl Reader for RssReader { 175 | fn extract(&self, entry: &RssItem, source: &RssChannel) -> Result { 176 | info!("Reading RSS entry {:?} from {:?}", entry.guid(), entry.link()); 177 | let authors = RssReader::extract_authors_from_rss(entry, source); 178 | let content = entry 179 | .content() 180 | .unwrap_or_else(|| entry.description().unwrap_or("")) 181 | // First step is to fix HTML, so load it using html5ever 182 | // (because there is no better html parser than a real browser one) 183 | // TODO implement image inlining 184 | .to_owned(); 185 | let links = match entry.link() { 186 | Some(l) => vec![l.to_owned()], 187 | _ => vec![], 188 | }; 189 | let id = if links.is_empty() { 190 | match entry.guid() { 191 | Some(g) => g.value().to_owned(), 192 | _ => "no id".to_owned(), 193 | } 194 | } else { 195 | links[0].clone() 196 | }; 197 | let last_date = RssReader::extract_date_from_rss(entry, source); 198 | let message = Message { 199 | authors, 200 | content, 201 | id, 202 | last_date: last_date?.naive_utc(), 203 | links, 204 | title: entry.title().unwrap_or("").to_owned(), 205 | }; 206 | Ok(message) 207 | } 208 | 209 | fn extract_messages(&self, source:&RssChannel)->Vec> { 210 | source.items() 211 | .iter() 212 | .map(|e| self.extract(e, source)) 213 | .collect() 214 | } 215 | 216 | fn read_feed_date(&self, source:&RssChannel)->NaiveDateTime { 217 | let n = Utc::now(); 218 | let feed_date_text = match source.pub_date() { 219 | 
Some(p) => p.to_owned(), 220 | None => match source.last_build_date() { 221 | Some(l) => l.to_owned(), 222 | None => n.to_rfc2822(), 223 | }, 224 | }; 225 | DateTime::parse_from_rfc2822(&feed_date_text) 226 | .unwrap() 227 | .naive_utc() 228 | 229 | } 230 | } 231 | -------------------------------------------------------------------------------- /src/feed_utils.rs: -------------------------------------------------------------------------------- 1 | use regex::Regex; 2 | use tests_bin::unit_tests; 3 | 4 | /// 5 | /// Sanitize a list of message authors 6 | /// 7 | /// # Arguments 8 | /// 9 | /// * `message_authors` a list of message autros to sanitize 10 | /// * `domain` a default domain string, used when domain is given 11 | #[unit_tests("feed_utils/can_sanitize_message_authors.rs")] 12 | pub fn sanitize_message_authors(message_authors:Vec, domain:String)->Vec<(String, String)> { 13 | let fixed = message_authors 14 | .iter() 15 | .map(|author| { 16 | sanitize_email(author, &domain) 17 | }) 18 | .collect(); 19 | fixed 20 | } 21 | 22 | /// 23 | /// Trim the input string using the given set of characters as potential separators 24 | /// 25 | /// # Arguments 26 | /// 27 | /// * `text` text to trim 28 | /// * `characters` characters to use as separator 29 | /// 30 | /// # Return 31 | /// 32 | /// The trimmed text 33 | /// 34 | #[unit_tests("feed_utils/can_trim_to_chars.rs")] 35 | fn trim_to_chars(text:&str, characters:Vec<&str>)->String { 36 | let mut remaining = text; 37 | for cutter in characters { 38 | let elements:Vec<&str> = remaining.split(cutter).collect(); 39 | remaining = elements[0].trim(); 40 | } 41 | remaining.to_string() 42 | } 43 | 44 | /// 45 | /// Sanitizes email using "good" regular expression 46 | /// (which I obviously don't understand anymore) able to remove unwanted characters in email address 47 | #[unit_tests("feed_utils/can_sanitize_email.rs")] 48 | pub fn sanitize_email(email:&String, domain:&String)->(String, String) { 49 | lazy_static! 
{ 50 | static ref EMAIL_AND_NAME_DETECTOR:Regex = 51 | Regex::new("([[:alpha:]_%\\+\\-\\.]+@[[:alpha:]_%\\+\\-]+\\.[[:alpha:]_%\\+\\-]+{1,}) \\(([^\\)]*)\\)").unwrap(); 52 | } 53 | lazy_static! { 54 | static ref BAD_CHARACTER_REMOVER:Regex = 55 | Regex::new("[^[:alnum:].]").unwrap(); 56 | } 57 | if EMAIL_AND_NAME_DETECTOR.is_match(email) { 58 | let captures = EMAIL_AND_NAME_DETECTOR.captures(email).unwrap(); 59 | // Maybe we could rewrite it in a better way 60 | let name:String = captures.get(2).unwrap().as_str().to_string(); 61 | let email:String = captures.get(1).unwrap().as_str().to_string(); 62 | (name, email) 63 | } else { 64 | // When no email is provided, use domain name 65 | let email = if email.is_empty() { 66 | domain 67 | } else { 68 | email 69 | }; 70 | // Remove bad characters 71 | let trimmed:String = trim_to_chars(email, vec!["|", ":", "-", "<", ">"]); 72 | let lowercased = trimmed.to_lowercase(); 73 | let tuple = (trimmed, 74 | BAD_CHARACTER_REMOVER.replace_all(&lowercased, "_") 75 | ); 76 | (tuple.0, format!("{}@{}", tuple.1, domain)) 77 | } 78 | } 79 | -------------------------------------------------------------------------------- /src/image_to_data.rs: -------------------------------------------------------------------------------- 1 | use base64::engine::*; 2 | 3 | use lol_html::{rewrite_str, element, RewriteStrSettings}; 4 | use lol_html::errors::*; 5 | use tests_bin::unit_tests; 6 | 7 | #[unit_tests("image_to_data.rs")] 8 | pub fn transform(document: &String) -> Result { 9 | 10 | rewrite_str(document, 11 | RewriteStrSettings { 12 | element_content_handlers: vec![ 13 | // Rewrite images having src where src doesn't start with data 14 | element!("img[src]", |el| { 15 | let src:String = el 16 | .get_attribute("src") 17 | .unwrap(); 18 | debug!("processing image at url {}", &src); 19 | 20 | if !src.starts_with("data") { 21 | // Now it's time to rewrite! 22 | // Now download image source and base64 encode it ! 
23 | debug!("reading image from {}", &src); 24 | if let Ok(response) = ureq::get(&src).call() { 25 | let mut image: Vec = vec![]; 26 | if let Ok(_value) = response.into_reader().read_to_end(&mut image) { 27 | let image_bytes = image.as_slice(); 28 | let encoded = general_purpose::STANDARD_NO_PAD.encode(image_bytes); 29 | let image_mime_type = tree_magic_mini::from_u8(image_bytes); 30 | let encoded_image = format!("data:{};base64,{}", image_mime_type, encoded); 31 | el.set_attribute("src", &encoded_image).unwrap(); 32 | } 33 | } 34 | } 35 | 36 | Ok(()) 37 | }) 38 | ], 39 | ..RewriteStrSettings::default() 40 | }) 41 | } 42 | -------------------------------------------------------------------------------- /src/import.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use std::fs::File; 4 | use std::io::Read; 5 | 6 | use super::config::Config; 7 | use super::feed::Feed; 8 | use super::store::Store; 9 | 10 | use treexml::*; 11 | 12 | pub fn import(from_file: &PathBuf, to_store: &mut Store) { 13 | let mut file = 14 | File::open(from_file).unwrap_or_else(|_| panic!("Unable to open file {:?}", from_file)); 15 | let mut contents = String::new(); 16 | file.read_to_string(&mut contents) 17 | .unwrap_or_else(|_| panic!("Unable to read file {:?}", from_file)); 18 | 19 | let doc = Document::parse(contents.as_bytes()).unwrap(); 20 | let root = doc.root.unwrap(); 21 | 22 | // old style parsing is good, because it is old :-) 23 | for element in root.children { 24 | match element.name.as_ref() { 25 | "head" => debug!("Reading {}", element), 26 | "body" => import_body(element, to_store, ""), 27 | _ => error!("element {:?} was unexpected, please fill a bug !", element), 28 | } 29 | } 30 | } 31 | 32 | fn import_body(body: Element, to_store: &mut Store, folder: &str) { 33 | for element in body.children { 34 | match element.name.as_ref() { 35 | "outline" => import_outline(element, to_store, folder), 36 | _ => 
error!("element {:?} was unexpected, please fill a bug!", element), 37 | } 38 | } 39 | } 40 | 41 | fn import_outline(outline: Element, to_store: &mut Store, folder: &str) { 42 | if outline.children.is_empty() { 43 | // An outline without children is considered an OPML entry. Does it have the right set of attributes ? 44 | if outline.attributes.contains_key("type") 45 | && outline.attributes.contains_key("text") 46 | && outline.attributes.contains_key("xmlUrl") 47 | { 48 | let url = outline.attributes.get("xmlUrl"); 49 | let feed = Feed { 50 | url: url.unwrap().to_string(), 51 | config: Config { 52 | email: None, 53 | folder: Some(folder.to_string()), 54 | from: None, 55 | inline_image_as_data: false, 56 | }, 57 | last_updated: Feed::at_epoch(), 58 | last_message: None, 59 | }; 60 | to_store.add_feed(feed); 61 | } else { 62 | error!("outline {:?} has no children, but doesn't has the right set of attributes. Please fill a bug!", outline.attributes); 63 | } 64 | } else { 65 | // An outline with children is considered an OPML folder. Does it have the right set of attributes ? 66 | if outline.attributes.contains_key("text") && outline.attributes.contains_key("title") { 67 | let folder = &outline.attributes["text"]; 68 | import_body(outline.clone(), to_store, &folder.to_string()); 69 | } else { 70 | error!("outline {:?} has children, but doesn't has the right set of attributes. Please fill a bug!", outline.attributes); 71 | } 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | //! Application transforming rss feeds into email by directly pushing the entries into IMP folders. 2 | //! This application is an adaption of the rss2imap Python script to Rust. 3 | //! 4 | //! #### How to use ? 5 | //! 6 | //! The simplest way to understand what to do is just to run `rrss2imap --help` 7 | //! 8 | //! 
It should output something like 9 | //! 10 | //! FLAGS: 11 | //! -h, --help Prints help information 12 | //! -V, --version Prints version information 13 | //! 14 | //! SUBCOMMANDS: 15 | //! add Adds a new feed given its url 16 | //! delete Delete the given feed 17 | //! email Changes email address used in feed file to be the given one 18 | //! export Export subscriptions as opml file 19 | //! help Prints this message or the help of the given subcommand(s) 20 | //! import import the given opml file into subscriptions 21 | //! list List all feeds configured 22 | //! new Creates a new feedfile with the given email address 23 | //! reset Reset feedfile (in other words, remove everything) 24 | //! run Run feed parsing and transformation 25 | //! 26 | //! Which give you a glimpse of what will happen 27 | //! 28 | //! Each of these commands also provide some help, when run with the same `--help` flag. 29 | //! 30 | //! The important operations to memorize are obviously 31 | //! 32 | //! #### `rrss2imap new` 33 | //! 34 | //! Creates a new `config.json` file in the configuration directory 35 | //! (`~/.config/rrss2imap/` on linux, 36 | //! `~/Library/Preferences/org.Rrss2imap.rrss2imap` on macOS, 37 | //! `AppData\Roaming\Rrss2imap\rrss2imap\` on Windows). At init time, the 38 | //! config file will only contains `settings` element with the email address 39 | //! set. You **have** to edit this file and set 40 | //! 41 | //! * the used imap server 42 | //! ** with user login and password 43 | //! ** and security settings (secure should contain `{"Yes": secure port}` for 44 | //! imap/s or `{"No": unsecure port}` for simple imap) 45 | //! * the default config 46 | //! ** folder will be the *full path to an imap folder* where entries will 47 | //! fall in (e.g., `INBOX.News`). The exact syntax depends on your email provider. 48 | //! ** email will be the recipient email address (which may not be yours for easier filtering) 49 | //! ** Base64 image inlining 50 | //! 51 | //! 
`feeds` is the list of all rss feeds; use `rrss2imap add` to add a new feed. 52 | //! 53 | //! #### `rrss2imap add` 54 | //! 55 | //! This command will add a new feed to your config. You can directly set here the email recipient as well as the folder 56 | //! (but not the base64 image inlining parameter) 57 | //! 58 | //! #### `rrss2imap run` 59 | //! 60 | //! THis is the main command. It will 61 | //! 62 | //! 1. get all rss/atom feed contents 63 | //! 2. List all new entries in these feeds 64 | //! 3. Transform these entries into valid email messages 65 | //! 4. Push these mail messages directly on IMAP server 66 | //! 67 | //! #### `rrss2imap list` 68 | //! 69 | //! Displays a list of the rss feeds. Here is an example 70 | //! 71 | //! ``` 72 | //! 0 : http://tontof.net/?rss (to: Nicolas Delsaux (default)) RSS/rrss2imap (default) 73 | //! 1 : https://www.brothers-brick.com/feed/ (to: Nicolas Delsaux (default)) RSS/rrss2imap (default) 74 | //! 2 : https://nicolas-delsaux.hd.free.fr/rss-bridge/?action=display&bridge=LesJoiesDuCode&format=AtomFormat (to: Nicolas Delsaux (default)) RSS/rrss2imap (default) 75 | //! ``` 76 | //! 77 | //! Please notice that each entry has an associated number, which is the one to enter when running `rrss2imap delete ` 78 | //! 79 | //! #### `config.json` format 80 | //! 81 | //! A typical feedfile will look like this 82 | //! 83 | //! ```json 84 | //! { 85 | //! "settings": { 86 | //! "email": { 87 | //! "server": "the imap server of your mail provider", 88 | //! "user": "your imap user name", 89 | //! "password": "your imap user password", 90 | //! "secure": { 91 | //! "Yes": 993 // Set to "Yes": port for imaps or "No": port for unsecure imap 92 | //! } 93 | //! }, 94 | //! // This config is to be used for all feeds 95 | //! "config": { 96 | //! // This is the email address written in each mail sent. It can be different from the email user 97 | //! "email": "Nicolas Delsaux ", 98 | //! 
// This is the imap folder in which mails will be written 99 | //! "folder": "RSS/rrss2imap" 100 | //! // Setting this to true will force rrss2imap to transform all images into 101 | //! // base64. This prevents images from beind downloaded (and is really cool when reading feeds from a smartphone) 102 | //! // But largely increase each mail size (which can be quite bothering) 103 | //! "inline_image_as_data": true 104 | //! } 105 | //! }, 106 | //! "feeds": [ 107 | //! { 108 | //! "url": "http://tontof.net/?rss", 109 | //! // This last updated is updated for each entry and should be enough to have rss items correctly read 110 | //! "last_updated": "2019-05-04T16:53:15", 111 | //! "config": { 112 | //! // each config element can be overwritten at the feed level 113 | //! } 114 | //! }, 115 | //! ``` 116 | //! 117 | 118 | extern crate structopt; 119 | #[macro_use] 120 | extern crate log; 121 | extern crate serde; 122 | #[macro_use] 123 | extern crate serde_derive; 124 | extern crate serde_json; 125 | extern crate flexi_logger; 126 | extern crate treexml; 127 | extern crate chrono; 128 | extern crate rfc822_sanitizer; 129 | extern crate unidecode; 130 | #[macro_use] 131 | extern crate lazy_static; 132 | #[macro_use] 133 | extern crate human_panic; 134 | extern crate lol_html; 135 | extern crate imap; 136 | extern crate base64; 137 | extern crate atom_syndication; 138 | extern crate rss; 139 | extern crate xhtmlchardet; 140 | extern crate url; 141 | extern crate regex; 142 | extern crate custom_error; 143 | use flexi_logger::Logger; 144 | use std::path::PathBuf; 145 | use structopt::StructOpt; 146 | use std::error::Error; 147 | 148 | mod config; 149 | mod export; 150 | mod feed_errors; 151 | mod feed_reader; 152 | mod feed_utils; 153 | mod feed; 154 | mod image_to_data; 155 | mod import; 156 | mod message; 157 | mod settings; 158 | mod store; 159 | mod syndication; 160 | 161 | /// 162 | /// rrss2imap is a script used to transform rss feed entries into mail messages 
that are directly dropped 163 | /// into your mailbox by the grace of imap protocol 164 | /// 165 | #[derive(Debug, StructOpt)] 166 | #[structopt(author=env!("CARGO_PKG_AUTHORS"))] 167 | struct RRSS2IMAP { 168 | /// Verbose mode (-v, -vv, -vvv) 169 | #[structopt(short, long, parse(from_occurrences))] 170 | verbose: u8, 171 | #[structopt(subcommand)] 172 | cmd: Command 173 | } 174 | 175 | #[derive(Debug, StructOpt)] 176 | enum Command { 177 | /// Creates a new feedfile with the given email address 178 | #[structopt(name = "new")] 179 | New { 180 | /// email the notifications will be sent to 181 | email: String, 182 | }, 183 | /// Changes email address used in feed file to be the given one 184 | #[structopt(name = "email")] 185 | Email { email: String }, 186 | /// Run feed parsing and transformation 187 | #[structopt(name = "run")] 188 | Run, 189 | /// Adds a new feed given its url. 190 | /// This option can use either named parameters or positional parameters. 191 | /// Although positional parameters may seems simpler to use, they're of a more weird usage 192 | /// (and may be sometimes buggy) 193 | #[structopt(name = "add")] 194 | Add { 195 | /// url of the feed 196 | #[structopt(short = "u", long = "url")] 197 | url:Option, 198 | /// email address to use to forward feed content 199 | #[structopt(short = "e", long = "email")] 200 | email:Option, 201 | /// destination folder of the email 202 | #[structopt(short = "d", long = "destination")] 203 | destination:Option, 204 | /// inline image in this feed (useful only when default is to not include images) 205 | #[structopt(short = "i", long = "inline-mages")] 206 | inline_images:bool, 207 | /// Don't inline image in this feed (useful only when default is to include images) 208 | #[structopt(short = "x", long = "do-not-inline-mages")] 209 | do_not_inline_images:bool, 210 | /// Parameters used to add the feed. Expected parameters are 211 | /// 212 | /// - url of the feed. web page urls are not yet supported. 
Given as first parameters, **mandatory** 213 | /// 214 | /// - email address to use to forward feed content, **optional** 215 | /// 216 | /// - destination folder of feed content, **optional** 217 | /// 218 | /// Notice parameters have to be given in THIS order. 219 | parameters: Vec, 220 | }, 221 | /// List all feeds configured 222 | #[structopt(name = "list")] 223 | List, 224 | /// Reset feedfile (in other words, remove everything) 225 | #[structopt(name = "reset")] 226 | Reset, 227 | /// Delete the given feed 228 | #[structopt(name = "delete")] 229 | Delete { 230 | // index of the feed to delete 231 | feed: u32, 232 | }, 233 | /// Export subscriptions as opml file 234 | #[structopt(name = "export")] 235 | Export { 236 | /// Output file, stdout if not present 237 | #[structopt(parse(from_os_str))] 238 | output: Option, 239 | }, 240 | /// import the given opml file into subscriptions 241 | #[structopt(name = "import")] 242 | Import { 243 | /// Output file, stdout if not present 244 | #[structopt(parse(from_os_str))] 245 | input: Option, 246 | }, 247 | } 248 | 249 | /// Main function simply load the RRSS2IMAP struct from the command-line arguments 250 | pub fn main() -> Result<(), Box> { 251 | if !cfg!(debug_assertions) { 252 | setup_panic!(); 253 | } 254 | let opt = RRSS2IMAP::from_args(); 255 | 256 | // Configure logger 257 | Logger::try_with_env_or_str( 258 | match opt.verbose { 259 | 0 => "warn, rrss2imap = info", 260 | 1 => "warn, rrss2imap = debug", 261 | 2 => "warn, rrss2imap = trace", 262 | _ => "trace", }) 263 | .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)) 264 | .format(match opt.verbose { 265 | 0 => flexi_logger::colored_default_format, 266 | 1 => flexi_logger::colored_default_format, 267 | 2 => flexi_logger::colored_detailed_format, 268 | _ => flexi_logger::colored_with_thread, }) 269 | .start() 270 | .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e)); 271 | 272 | let store_path = store::find_store(); 273 | 
let store_result = store::Store::load(&store_path); 274 | match store_result { 275 | Ok(mut store) => { 276 | match opt.cmd { 277 | Command::New { email } => store.init_config(email), 278 | Command::Email { email } => store.set_email(email), 279 | 280 | Command::List => store.list(), 281 | 282 | Command::Add { url, email, destination, inline_images, do_not_inline_images, parameters } => 283 | store.add(url, email, destination, store.settings.config.inline(inline_images, do_not_inline_images), parameters), 284 | Command::Delete { feed } => store.delete(feed), 285 | 286 | Command::Reset => store.reset(), 287 | 288 | Command::Run => store.run(), 289 | 290 | Command::Export { output } => store.export(output), 291 | Command::Import { input } => store.import(input), 292 | } 293 | }, 294 | Err(e) => { 295 | error!("Impossible to open store {}\n{}", store_path.to_string_lossy(), e); 296 | } 297 | } 298 | Ok(()) 299 | } 300 | -------------------------------------------------------------------------------- /src/message.css: -------------------------------------------------------------------------------- 1 | img { 2 | max-width: 100% !important; 3 | height: auto; 4 | } 5 | 6 | body, 7 | #body { 8 | font-size: 12pt; 9 | word-wrap: break-word; 10 | -webkit-nbsp-mode: space; 11 | -webkit-line-break: after-white-space; 12 | font-family: Georgia, Times New Roman, Times, serif; 13 | } 14 | 15 | a:link { 16 | color: #0000cc 17 | } 18 | 19 | h1.header a { 20 | font-weight: normal; 21 | text-decoration: none; 22 | color: black; 23 | } 24 | 25 | .summary { 26 | font-size: 80%; 27 | font-style: italic; 28 | } 29 | -------------------------------------------------------------------------------- /src/message.rs: -------------------------------------------------------------------------------- 1 | use chrono::NaiveDateTime; 2 | 3 | use super::feed::Feed; 4 | use super::image_to_data; 5 | use super::settings::*; 6 | use mail_builder::MessageBuilder; 7 | use custom_error::custom_error; 8 | 9 
| custom_error!{pub UnprocessableMessage 10 | CantPutDateInMessage{ value:String } = "EmailMessage can't parse date from {value}", 11 | CantPutFirstAuthorInMessage { value:String } = "Unable to parse first author {value}. 12 | Please consider adding in feed config the \"from\": ... field", 13 | CantWriteTransformedMessage = "Can't re-write transformed message after image Base64'ing" 14 | } 15 | 16 | /// 17 | /// Structure for storing message data prior to having these messages written to IMAP. 18 | /// This structure serves as a common interface for Item/Entry 19 | #[derive(Clone)] 20 | pub struct Message { 21 | /// List of message authors 22 | pub authors: Vec<(String, String)>, 23 | /// Message content. Image extraction should happen BEFORE that storage. 24 | pub content: String, 25 | /// Message id 26 | pub id: String, 27 | pub last_date: NaiveDateTime, 28 | pub links: Vec, 29 | pub title: String, 30 | } 31 | 32 | impl Message { 33 | pub fn write_to_imap(&self, feed: &Feed, settings: &Settings) { 34 | let folder = feed.config.get_folder(&settings.config); 35 | let content = self.build_message(feed, settings); 36 | match content { 37 | Ok(text) => { 38 | debug!("===========================\nWriting message content to IMAP\n{}\n===========================", 39 | text); 40 | match settings.email.append(&folder, &text) { 41 | Ok(_) => debug!("Successfully written {}", self.title), 42 | Err(e) => error!( 43 | "{}\nUnable to select mailbox {}. 
Item titled {} won't be written", 44 | e, &folder, self.title 45 | ), 46 | } 47 | }, 48 | Err(error) => { 49 | warn!("Couldn(t write message {:?} from feed {} due to {}", self.links, feed.url, error); 50 | } 51 | } 52 | } 53 | 54 | fn build_from(&self, feed:&Feed, _settings:&Settings)->(String, String) { 55 | match &feed.config.from { 56 | Some(from) =>(from.to_owned(), from.to_owned()), 57 | None => { 58 | if self.authors.is_empty() { 59 | ("Unkown author".to_owned(), "what@what.com".to_owned()) 60 | } else { 61 | self.authors[0].to_owned() 62 | } 63 | } 64 | } 65 | } 66 | 67 | fn build_message(&self, feed: &Feed, settings: &Settings) -> Result { 68 | let content = self.extract_content(feed, settings); 69 | debug!("===========================\nCreating message content\n{}\n===========================", content); 70 | let from = self.build_from(feed, settings); 71 | let _date = self.date_text(); 72 | let to_addr = settings.config.email.as_ref().unwrap_or(&settings.email.user); 73 | let email = MessageBuilder::new() 74 | .from(from) 75 | .to(to_addr.as_str()) 76 | .subject(str::replace(self.title.as_str(), "\n", "")) 77 | .html_body(content.as_str()) 78 | .date(self.last_date.timestamp()) 79 | .write_to_string() 80 | .unwrap(); 81 | Ok(email) 82 | } 83 | 84 | /// Makes a valid HTML file out of the given Item. 85 | /// This method provides all the transformation that should happen 86 | fn extract_content(&self, _feed: &Feed, _settings: &Settings) -> String { 87 | let style = include_str!("message.css"); 88 | let title = format!("

{}

", 89 | self.id, 90 | self.title); 91 | let body = format!("
{}
", self.content); 92 | let links = self.links.iter() 93 | .map(|l| format!("

URL: {}

", l, l)) 94 | .collect::>() 95 | .join("\n") 96 | ; 97 | format!(" 98 | 99 | 100 | 101 | 104 | 105 | 106 | 107 |
108 | {} 109 | {} 110 | {} 111 |
112 | 113 | 114 | ", 115 | style, 116 | title, 117 | body, 118 | links) 119 | } 120 | 121 | /// 122 | /// Process the feed effective content. 123 | /// This should allow 124 | /// * image transformation into base64 when needed 125 | /// 126 | pub fn get_processed_content(html_content:&String, feed: &Feed, settings: &Settings) -> Result { 127 | if feed.config.inline_image_as_data || settings.config.inline_image_as_data { 128 | match image_to_data::transform(html_content) { 129 | Ok(transformed_html_content) => Ok(transformed_html_content), 130 | Err(_) => Err(UnprocessableMessage::CantWriteTransformedMessage) 131 | } 132 | } else { 133 | Ok(html_content.clone()) 134 | } 135 | } 136 | 137 | fn date_text(&self) -> String { 138 | self.last_date 139 | .format("%a, %d %b %Y %H:%M:%S -0000") 140 | .to_string() 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /src/settings.rs: -------------------------------------------------------------------------------- 1 | use imap::error::Result; 2 | use imap::Session; 3 | use std::{thread, time}; 4 | 5 | use super::config::Config; 6 | 7 | /// Secured connection or not ? 8 | /// Whichever is chosen, user has to give the port as parameter 9 | #[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] 10 | pub enum Secure { 11 | No(u16), 12 | Yes(u16), 13 | } 14 | /// mail config 15 | /// I SHOULD allow a kind of Keepass access. 16 | /// But as code isn't expected to run on any kind of UI-aware machine (but on a headless Raspbian), 17 | /// I can't connect it to Keepass. 18 | /// So I should implement a kind of secure storage 19 | #[derive(Debug, Deserialize, Serialize, PartialEq)] 20 | pub struct Email { 21 | /// imap server we want to connect to 22 | pub server: String, 23 | /// username used to connect to that server 24 | pub user: String, 25 | /// password used to connect to that server. 
26 | /// **WARNING** THis password is in **no way** encrypted, which makes rrss2imap a "not-so-secured" software 27 | pub password: String, 28 | /// secured connection state 29 | #[serde(default = "Email::default_secure")] 30 | pub secure: Secure, 31 | #[serde(default = "Email::default_retry_max_count")] 32 | pub retry_max_count: u8, 33 | #[serde(default = "Email::default_retry_delay")] 34 | pub retry_delay: u64, 35 | } 36 | 37 | /// Imap effective connection type (ie once connection has been established). 38 | /// This enum presents a simple interface allowing seamless access for (un)secured servers. 39 | #[derive(Debug)] 40 | pub enum Imap { 41 | Secured(Session>), 42 | Insecured(Session), 43 | } 44 | 45 | impl Imap { 46 | /// Appends a new message to the given server. 47 | pub fn append, B: AsRef<[u8]>>(&mut self, mailbox: S, content: B) -> Result<()> { 48 | match self { 49 | Imap::Secured(ref mut session) => session.append(mailbox, content), 50 | Imap::Insecured(ref mut session) => session.append(mailbox, content), 51 | } 52 | } 53 | } 54 | 55 | impl Email { 56 | /// Appends a new message to the given server. 57 | /// This method decorates the Imap::append method by adding retry ability. 58 | pub fn append, B: AsRef<[u8]>>(&self, mailbox: &S, content: &B) -> Result<()> { 59 | let mut count = 0; 60 | loop { 61 | count += 1; 62 | let mut imap = self.start(); 63 | let result = imap.append(mailbox, content); 64 | if result.is_err() { 65 | if count > self.retry_max_count { 66 | return result; 67 | } else { 68 | error!( 69 | "Previous append attempt failed with {}. 
Retrying ({}/{})in {} s.!", 70 | result.unwrap_err(), 71 | count, 72 | self.retry_max_count, 73 | self.retry_delay 74 | ); 75 | // TODO maybe remove that once code is parallel 76 | thread::sleep(time::Duration::from_secs(self.retry_delay)); 77 | } 78 | } else { 79 | return result; 80 | } 81 | } 82 | } 83 | 84 | /// default secure port, used by serde 85 | pub fn default_secure() -> Secure { 86 | Secure::Yes(993) 87 | } 88 | /// default max retries number, used by serde 89 | pub fn default_retry_max_count() -> u8 { 90 | 3 91 | } 92 | /// default retry delay, used by serde 93 | pub fn default_retry_delay() -> u64 { 94 | 1 95 | } 96 | /// Constructs a default email config, used in Settings by serde 97 | pub fn default() -> Email { 98 | Email { 99 | server: "Set your email server address here".to_owned(), 100 | user: "Set your imap server user name (it may be your email address or not)".to_owned(), 101 | password: "Set your imap server password (yup, in clear, this is very bad)".to_owned(), 102 | secure: Email::default_secure(), 103 | retry_max_count: Email::default_retry_max_count(), 104 | retry_delay: Email::default_retry_delay(), 105 | } 106 | } 107 | 108 | /// starts connection to selected imap server, whatever it is 109 | pub fn start(&self) -> Imap { 110 | match self.secure { 111 | Secure::Yes(port) => self.start_secure(port), 112 | Secure::No(_port) => panic!("rrss2map no more supports unsecured connection to IMAP server due to evolutions of IMAP library (see https://github.com/jonhoo/rust-imap/pull/140)"), 113 | } 114 | } 115 | 116 | fn start_secure(&self, port: u16) -> Imap { 117 | let tls = native_tls::TlsConnector::builder() 118 | .build() 119 | .expect("Couldn't create TLS connector"); 120 | 121 | // we pass in the domain twice to check that the server's TLS 122 | // certificate is valid for the domain we're connecting to. 
123 | let client = imap::connect((self.server.as_str(), port), &self.server, &tls) 124 | .unwrap_or_else(|_| panic!("Couldn't connect to {}:{}", self.server, port)); 125 | 126 | // the client we have here is unauthenticated. 127 | // to do anything useful with the e-mails, we need to log in 128 | let imap_session = client 129 | .login(&self.user, &self.password) 130 | .unwrap_or_else(|_| { 131 | panic!( 132 | "Couldn't securely connect to {}:{} for login {}", 133 | self.server, port, self.user 134 | ) 135 | }); 136 | 137 | debug!( 138 | "Successfully connected to SECURE imap server {}", 139 | self.server 140 | ); 141 | Imap::Secured(imap_session) 142 | } 143 | } 144 | 145 | /// Store-level config 146 | #[derive(Debug, Deserialize, Serialize)] 147 | pub struct Settings { 148 | /// when set to true, no reading statis will be persisted. 149 | /// As a consequence, messages may be read more than once 150 | #[serde( 151 | skip_serializing_if = "Settings::is_false", 152 | default = "Settings::default_false" 153 | )] 154 | pub do_not_save: bool, 155 | /// inline all images as base64 data 156 | /* #[serde( 157 | skip_serializing_if = "Settings::is_false", 158 | default = "Settings::default_false" 159 | )] 160 | pub inline_image_as_data: bool, 161 | */ 162 | #[serde(default = "Email::default")] 163 | pub email: Email, 164 | #[serde(default = "Config::new")] 165 | pub config: Config, 166 | } 167 | 168 | impl Settings { 169 | pub fn is_false(value: &bool) -> bool { 170 | !value 171 | } 172 | pub fn default_false() -> bool { 173 | false 174 | } 175 | /* 176 | pub fn is_true(value: &bool) -> bool { 177 | !!value 178 | } 179 | pub fn default_true() -> bool { 180 | true 181 | } 182 | */ 183 | pub fn default() -> Settings { 184 | Settings { 185 | do_not_save: false, 186 | email: Email::default(), 187 | config: Config::new(), 188 | } 189 | } 190 | } 191 | -------------------------------------------------------------------------------- /src/store.rs: 
-------------------------------------------------------------------------------- 1 | extern crate directories; 2 | 3 | use directories::ProjectDirs; 4 | use tests_bin::unit_tests; 5 | use std::path::{PathBuf, Path}; 6 | 7 | use std::fs; 8 | use std::fs::File; 9 | use std::io::Read; 10 | 11 | 12 | use super::export; 13 | use super::feed::Feed; 14 | use super::import; 15 | use super::settings::Settings; 16 | 17 | use rayon::prelude::*; 18 | 19 | use custom_error::custom_error; 20 | 21 | custom_error!{pub UnusableStore 22 | IO{source:std::io::Error} = "input/output error", 23 | JsonParseError{source:serde_json::Error} = "Can't parse JSON content of store" 24 | } 25 | 26 | #[unit_tests("store.rs")] 27 | /// Main application structure. 28 | /// This structure is read/written from/to a JSON file 29 | #[derive(Debug, Deserialize, Serialize)] 30 | pub struct Store { 31 | /// Contains all application settings 32 | pub settings: Settings, 33 | /// Contains all feeds being read 34 | pub feeds: Vec, 35 | #[serde(skip)] 36 | pub dirty:bool, 37 | #[serde(skip)] 38 | pub path: PathBuf 39 | } 40 | 41 | /// Name of the file from which config is read/written. As of today, this name is not expected to change. 42 | pub const STORE: &str = "config.json"; 43 | 44 | /// Calculate the location of the `config.json` store file. 45 | /// If `config.json` is found in the current directory, use it for backward 46 | /// compatibility. Otherwise, return a path inside the project directory 47 | /// (~/.config/rrss2imap/ on Linux, system-specific on macOS and Windows). 48 | pub fn find_store() -> PathBuf { 49 | let mut path = PathBuf::from(STORE); 50 | if !path.exists() { 51 | // The current directory takes precedence over project directory 52 | // for existing configurations for backward compatibility. 
53 | if let Some(proj_dirs) = ProjectDirs::from("org", "Rrss2imap", "rrss2imap") { 54 | path = proj_dirs.config_dir().to_path_buf(); 55 | path.push(STORE); 56 | } 57 | } 58 | path 59 | } 60 | 61 | impl Store { 62 | /// Initialize a Store object from a config file at the given path. If the 63 | /// config file does not exist, return a Store object with default values. 64 | pub fn load(path: &PathBuf) -> Result { 65 | if path.exists() { 66 | info!("Reading config file {}", path.to_string_lossy()); 67 | // First read the file 68 | let mut file = File::open(path)?; 69 | let mut contents = String::new(); 70 | file.read_to_string(&mut contents)?; 71 | // Then deserialize its content 72 | let mut store: Store = 73 | serde_json::from_str(&contents)?; 74 | store.path = path.to_owned(); 75 | // And return it 76 | Ok(store) 77 | } else { 78 | info!("Using fresh config file {}", path.to_string_lossy()); 79 | Ok(Store { 80 | settings: Settings::default(), 81 | feeds: vec![], 82 | dirty: false, 83 | path: path.to_owned() 84 | }) 85 | } 86 | } 87 | 88 | /// Save all informations in the store file 89 | fn save(&self) { 90 | info!("Saving config file {}", self.path.to_string_lossy()); 91 | let serialized = serde_json::to_string_pretty(self).expect("Can't serialize Store to JSON"); 92 | let directory = self.path.parent().unwrap_or(Path::new(".")); 93 | fs::create_dir_all(directory) 94 | .unwrap_or_else(|_| panic!("Unable to create directory for file {}", self.path.to_string_lossy())); 95 | fs::write(&self.path, serialized) 96 | .unwrap_or_else(|_| panic!("Unable to write file {}", self.path.to_string_lossy())); 97 | } 98 | 99 | /// Create a new configuration file with the given email. 
100 | pub fn init_config(&mut self, email: String) { 101 | if self.path.exists() { 102 | warn!("Config file {} already exists, leaving it unchanged.", self.path.to_string_lossy()); 103 | } else { 104 | println!("Config file {} created, please edit it to finish configuration.", self.path.to_string_lossy()); 105 | self.settings.config.email = Some(email); 106 | self.dirty = true; 107 | self.save(); 108 | } 109 | } 110 | 111 | /// Set a new value for email and save file (prior to obviously exiting) 112 | pub fn set_email(&mut self, email: String) { 113 | self.settings.config.email = Some(email); 114 | self.dirty = true; 115 | self.save(); 116 | } 117 | 118 | /// Exports config into an OPML file 119 | /// see [export](rrss2imap::export::export) for implementation details 120 | pub fn export(&self, file: Option) { 121 | let path_to_write = file.expect("Can't export file if no file is given"); 122 | warn!("exporting content to {:?}", path_to_write); 123 | export::export(&path_to_write, self); 124 | info!("exported feeds to {:?}", path_to_write); 125 | } 126 | 127 | /// Import rss feeds provided as an opml file 128 | /// see [import](rrss2imap::import::import) for implementation details 129 | pub fn import(&mut self, file: Option) { 130 | let path_to_read = file.expect("Can't import file if no file is given"); 131 | info!("importing content from {:?}", path_to_read); 132 | let count = self.feeds.len(); 133 | import::import(&path_to_read, self); 134 | self.dirty = true; 135 | info!( 136 | "imported {} feeds from {:?}", 137 | self.feeds.len() - count, 138 | path_to_read 139 | ); 140 | } 141 | 142 | /// Add a feed to the feeds list and immediatly save the store. 
143 | pub fn add(&mut self, url:Option, email:Option, destination:Option, inline:bool, parameters: Vec) { 144 | let to_add:Feed = if url.is_some() { 145 | Feed::from_all(url, email, destination, inline) 146 | } else { 147 | Feed::from_vec(parameters) 148 | }; 149 | info!("adding \"{:?}\"", to_add); 150 | self.add_feed(to_add); 151 | self.dirty = true; 152 | } 153 | 154 | /// Delete the feed which id is given as parameter. 155 | /// The use of a number is a compatibility requirement 156 | pub fn delete(&mut self, feed: u32) { 157 | let f = self.feeds.remove(feed as usize); 158 | self.dirty = true; 159 | info!("Removed {:?}", f); 160 | } 161 | 162 | /// Reset the config file by removing all feeds and config 163 | pub fn reset(&mut self) { 164 | self.feeds.clear(); 165 | self.settings.config.clear(); 166 | self.dirty = true; 167 | info!("store has been cleared to contain only {:?}", self); 168 | } 169 | 170 | /// Run all rss to imap transformation 171 | /// Each feed is read and immediatly written in this thread. 172 | /// This should be rewritten to allow optimization/parallelism 173 | pub fn run(&mut self) { 174 | self.dirty = true; 175 | let feeds_length = self.feeds.len(); 176 | // Initialize mail server before processing feeds 177 | self.feeds = self.feeds 178 | .par_iter().enumerate() 179 | .map(|element| (element.1, element.1.read(element.0, &feeds_length, ))) 180 | .map(|(feed, messages)| feed.write_new_messages(&self.settings, messages)) 181 | .collect::>(); 182 | } 183 | 184 | /// Prints all the feeds to stdout. 185 | /// This is done in a way compatible with rss2imap original layout. 
186 | /// As a consequence, new elements (like image inlining) are not visible 187 | pub fn list(&self) { 188 | let lines: Vec = self 189 | .feeds 190 | .iter() 191 | .enumerate() 192 | .map(|(i, f)| format!("{} : {}", i, f.to_string(&self.settings.config))) 193 | .collect(); 194 | println!("{}", &lines.join("\n")); 195 | } 196 | 197 | /// If the feed url is not already in the store, adds it 198 | pub fn add_feed(&mut self, to_add: Feed) { 199 | // We never add the same feed twice. To ensure that, we check that no feed has the same url 200 | let tested = self.feeds.clone(); 201 | let already_existing: Vec<&Feed> = tested.iter().filter(|f| f.url == to_add.url).collect(); 202 | if already_existing.is_empty() { 203 | self.feeds.push(to_add); 204 | } else { 205 | error!( 206 | "We already read this feed with the following configuration {:?}", 207 | already_existing 208 | ); 209 | } 210 | } 211 | } 212 | 213 | impl Drop for Store { 214 | fn drop(&mut self) { 215 | if self.dirty { 216 | if self.settings.do_not_save { 217 | error!("do_not_save flag is set in config.json. NOT SAVING {} !", self.path.to_string_lossy()) 218 | } else { 219 | info!("store has been modified. Saving {} !", self.path.to_string_lossy()); 220 | self.save(); 221 | } 222 | } 223 | } 224 | } 225 | -------------------------------------------------------------------------------- /src/syndication.rs: -------------------------------------------------------------------------------- 1 | /// This is a shameless copy of https://github.com/tomshen/rust-syndication to have it working with recent versions of 2 | /// both RSS and atom crates 3 | use std::str::FromStr; 4 | 5 | /// Possible feeds types 6 | pub enum Feed { 7 | Atom(atom_syndication::Feed), 8 | RSS(rss::Channel), 9 | } 10 | 11 | /// Parse a value to a feed. 12 | impl FromStr for Feed { 13 | type Err = &'static str; 14 | 15 | /// Each supported enum value is tested after the other. 16 | /// We first try to load atom, then RSS. 
17 | /// If none work, an error is returned 18 | fn from_str(s: &str) -> Result { 19 | match s.parse::() { 20 | Ok(feed) => Ok(Feed::Atom(feed)), 21 | _ => match s.parse::() { 22 | Ok(channel) => Ok(Feed::RSS(channel)), 23 | _ => Err("Could not parse XML as Atom or RSS from input"), 24 | }, 25 | } 26 | } 27 | } 28 | 29 | impl ToString for Feed { 30 | fn to_string(&self) -> String { 31 | match *self { 32 | Feed::Atom(ref atom_feed) => atom_feed.to_string(), 33 | Feed::RSS(ref rss_channel) => rss_channel.to_string(), 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /tests/unit/example.atom: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Example Feed 5 | 6 | 2003-12-13T18:30:02Z 7 | 8 | John Doe 9 | 10 | urn:uuid:60a76c80-d399-11d9-b93C-0003939e0af6 11 | 12 | 13 | Atom-Powered Robots Run Amok 14 | 15 | urn:uuid:1225c695-cfb8-4ebb-aaaa-80da344efa6a 16 | 2003-12-13T18:30:02Z 17 | Some text. 18 | 19 | 20 | -------------------------------------------------------------------------------- /tests/unit/example.rss: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | RSS Title 5 | This is an example of an RSS feed 6 | http://www.example.com/main.html 7 | 2020 Example.com All rights reserved 8 | Mon, 6 Sep 2010 00:01:00 +0000 9 | Sun, 6 Sep 2009 16:20:00 +0000 10 | 1800 11 | 12 | 13 | Example entry 14 | Here is some text containing an interesting description. 
15 | http://www.example.com/blog/post/1 16 | 7bd204c6-1655-4c27-aeee-53f933c5395f 17 | Sun, 6 Sep 2009 16:20:00 +0000 18 | 19 | 20 | 21 | -------------------------------------------------------------------------------- /tests/unit/feed.rs: -------------------------------------------------------------------------------- 1 | extern crate spectral; 2 | use spectral::prelude::*; 3 | use super::*; 4 | 5 | #[test] 6 | fn can_build_feed_from_just_a_url() { 7 | assert_that!( 8 | Feed::from_vec(vec!["example.com".to_string()]) 9 | ).is_equal_to( 10 | Feed { 11 | url: "example.com".to_string(), 12 | config: Config { 13 | email: None, 14 | folder: None, 15 | from: None, 16 | inline_image_as_data: false 17 | }, 18 | last_updated: Feed::at_epoch(), 19 | last_message: None 20 | }) 21 | } 22 | 23 | #[test] 24 | fn can_build_feed_from_url_and_email() { 25 | assert_that!( 26 | Feed::from_vec(vec!["a@example.com".to_string(),"example.com".to_string()]) 27 | ).is_equal_to( 28 | Feed { 29 | url: "example.com".to_string(), 30 | config: Config { 31 | email: Some("a@example.com".to_string()), 32 | folder: None, 33 | from: None, 34 | inline_image_as_data: false 35 | }, 36 | last_updated: Feed::at_epoch(), 37 | last_message: None 38 | }) 39 | } 40 | 41 | #[test] 42 | fn can_build_feed_from_url_and_folder() { 43 | assert_that!( 44 | Feed::from_vec(vec!["folder".to_string(), "example.com".to_string()]) 45 | ).is_equal_to( 46 | Feed { 47 | url: "example.com".to_string(), 48 | config: Config { 49 | email: None, 50 | folder: Some("folder".to_string()), 51 | from: None, 52 | inline_image_as_data: false 53 | }, 54 | last_updated: Feed::at_epoch(), 55 | last_message: None 56 | }) 57 | } 58 | 59 | #[test] 60 | fn can_build_feed_from_url_email_and_folder() { 61 | assert_that!( 62 | Feed::from_vec(vec!["a@b.c".to_string(), "folder".to_string(), "example.com".to_string()]) 63 | ).is_equal_to( 64 | Feed { 65 | url: "example.com".to_string(), 66 | config: Config { 67 | email: None, 68 | folder: 
Some("folder".to_string()), 69 | from: None, 70 | inline_image_as_data: false 71 | }, 72 | last_updated: Feed::at_epoch(), 73 | last_message: None 74 | }) 75 | } 76 | 77 | /// Makes sure we can parse the feed given in https://validator.w3.org/feed/docs/atom.html 78 | #[test] 79 | fn can_read_an_atom_feed() { 80 | let feed = Feed::from_vec(vec!["a@b.c".to_string()]); 81 | let messages = feed.read_response_text(include_str!("example.atom").to_string()); 82 | assert_that!(messages) 83 | .has_length(1) 84 | ; 85 | let first = &messages[0]; 86 | assert_that!(first.content).is_equal_to("Some text.".to_string()); 87 | 88 | } 89 | 90 | 91 | /// Makes sure we can parse the feed given in https://en.wikipedia.org/wiki/RSS?useskin=vector#Example 92 | #[test] 93 | fn can_read_a_rss_feed() { 94 | let feed = Feed::from_vec(vec!["a@b.c".to_string()]); 95 | let messages = feed.read_response_text(include_str!("example.rss").to_string()); 96 | assert_that!(messages) 97 | .has_length(1) 98 | ; 99 | let first = &messages[0]; 100 | assert_that!(first.content).is_equal_to("Here is some text containing an interesting description.".to_string()); 101 | 102 | } -------------------------------------------------------------------------------- /tests/unit/feed_utils/can_sanitize_email.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | #[test] 4 | fn can_create_email_from_flo() { 5 | assert_eq!( 6 | ("F(lo)".to_string(), "f_lo_@linuxfr.org".to_string()), 7 | sanitize_email(&"F(lo)".to_string(), &"linuxfr.org".to_string()) 8 | ); 9 | } 10 | 11 | #[test] 12 | fn can_create_email_from_blog_a_part() { 13 | assert_eq!( 14 | ("Blog à part".to_string(), "blog___part@alias.erdorin.org".to_string()), 15 | sanitize_email(&"Blog à part".to_string(), &"alias.erdorin.org".to_string()) 16 | ); 17 | } 18 | 19 | #[test] 20 | fn can_create_email_from_xkcd() { 21 | assert_eq!( 22 | ("xkcd.com".to_string(), "xkcd.com@xkcd.com".to_string()), 23 | 
sanitize_email(&"xkcd.com".to_string(), &"xkcd.com".to_string()) 24 | ); 25 | } 26 | 27 | #[test] 28 | fn can_create_email_from_sex_at_liberation() { 29 | assert_eq!( 30 | ("sexes.blogs.liberation.fr".to_string(), "sexes.blogs.liberation.fr@sexes.blogs.liberation.fr".to_string()), 31 | sanitize_email( 32 | &"sexes.blogs.liberation.fr - Derniers articles".to_string(), 33 | &"sexes.blogs.liberation.fr".to_string() 34 | ) 35 | ); 36 | } 37 | 38 | #[test] 39 | fn can_create_email_from_real_address_at_sex_at_liberation() { 40 | assert_eq!( 41 | ("Agnès Giard".to_string(), "aniesu.giard@gmail.com".to_string()), 42 | sanitize_email( 43 | &"aniesu.giard@gmail.com (Agnès Giard)".to_string(), 44 | &"sexes.blogs.liberation.fr".to_string() 45 | ) 46 | ); 47 | } 48 | -------------------------------------------------------------------------------- /tests/unit/feed_utils/can_sanitize_message_authors.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | #[test] 4 | fn can_sanitize_empty_list(){ 5 | assert_eq!(vec![("a".to_string(), "a@example.com".to_string())], 6 | sanitize_message_authors(vec!["a".to_string()], "example.com".to_string())) 7 | } 8 | -------------------------------------------------------------------------------- /tests/unit/feed_utils/can_trim_to_chars.rs: -------------------------------------------------------------------------------- 1 | use super::*; 2 | 3 | #[test] 4 | fn can_trim_empty_vec() { 5 | assert_eq!("", 6 | trim_to_chars("", vec![])); 7 | } 8 | 9 | #[test] 10 | fn can_trim_simple_email() { 11 | assert_eq!( 12 | "a", 13 | trim_to_chars("a@b.c", vec!["@"]) 14 | ) 15 | } -------------------------------------------------------------------------------- /tests/unit/image_to_data.rs: -------------------------------------------------------------------------------- 1 | extern crate spectral; 2 | use spectral::prelude::*; 3 | 4 | use crate::image_to_data::transform; 5 | 6 | #[test] 7 | 
#[cfg(target_family = "windows")] 8 | fn can_url_encode_simple_image(){ 9 | assert_that!(transform(&" 10 | 11 | 12 | 13 | ".to_string())) 14 | .is_ok_containing(" 15 | 16 | 17 | 18 | ".to_string()); 19 | } 20 | 21 | #[test] 22 | #[cfg(target_family = "unix")] 23 | fn can_url_encode_simple_image(){ 24 | assert_that!(transform(&" 25 | 26 | 27 | 28 | ".to_string())) 29 | .is_ok_containing(" 30 | 31 | 32 | 33 | ".to_string()); 34 | } 35 | -------------------------------------------------------------------------------- /tests/unit/store.rs: -------------------------------------------------------------------------------- 1 | extern crate spectral; 2 | use spectral::prelude::*; 3 | 4 | use std::env; 5 | use crate::{settings::Email, config::Config}; 6 | use std::fs; 7 | use super::*; 8 | 9 | #[test] 10 | fn can_read_store_from_existing_file() { 11 | let mut config_file = env::current_dir().unwrap(); 12 | config_file.push("tests"); 13 | config_file.push("unit"); 14 | config_file.push("store"); 15 | config_file.push("simple_config_store.json"); 16 | assert_that!(config_file) 17 | .is_a_file(); 18 | let store_result = Store::load(&config_file); 19 | assert_that!(store_result) 20 | .is_ok(); 21 | let store = store_result.unwrap(); 22 | assert_that!(store.settings.email) 23 | .is_equal_to(Email { 24 | server: "imap_server".to_string(), 25 | user: "username".to_string(), 26 | password: "password".to_string(), 27 | secure: crate::settings::Secure::Yes(993), 28 | retry_max_count: 3, 29 | retry_delay: 1 30 | }); 31 | assert_that!(store.settings.config) 32 | .is_equal_to(Config { 33 | email: Some("Sender ".to_string()), 34 | folder: Some("default_folder".to_string()), 35 | from: None, 36 | inline_image_as_data: true 37 | }); 38 | assert_that!(store.feeds) 39 | .has_length(1); 40 | } 41 | 42 | 43 | #[test] 44 | fn can_read_store_from_non_existing_file() { 45 | let mut config_file = env::current_dir().unwrap(); 46 | config_file.push("tests"); 47 | config_file.push("unit"); 48 | 
config_file.push("store"); 49 | config_file.push("can_read_store_from_non_existing_file.json"); 50 | assert_that!(config_file) 51 | .does_not_exist(); 52 | let store_result = Store::load(&config_file); 53 | assert_that!(store_result) 54 | .is_ok(); 55 | let store = store_result.unwrap(); 56 | assert_that!(store.settings.email) 57 | .is_equal_to(Email { 58 | server: "Set your email server address here".to_string(), 59 | user: "Set your imap server user name (it may be your email address or not)".to_string(), 60 | password: "Set your imap server password (yup, in clear, this is very bad)".to_string(), 61 | secure: crate::settings::Secure::Yes(993), 62 | retry_max_count: 3, 63 | retry_delay: 1 64 | }); 65 | assert_that!(store.settings.config) 66 | .is_equal_to(Config { 67 | email: None, 68 | folder: None, 69 | from: None, 70 | inline_image_as_data: false 71 | }); 72 | assert_that!(store.feeds) 73 | .is_equal_to(vec![]) 74 | } 75 | 76 | #[test] 77 | fn bugfix_82_export_is_broken() { 78 | // Given 79 | let mut store_path = env::current_dir().unwrap(); 80 | store_path.push("tests"); 81 | store_path.push("unit"); 82 | store_path.push("store"); 83 | store_path.push("bugfix_82_export_is_broken.json"); 84 | let mut store = Store { 85 | settings: Settings { 86 | do_not_save: false, 87 | email: Email { 88 | server: "imap_server".to_string(), 89 | user: "username".to_string(), 90 | password: "password".to_string(), 91 | secure: crate::settings::Secure::Yes(993), 92 | retry_max_count: 3, 93 | retry_delay: 1 94 | }, 95 | config: Config { 96 | email: Some("Sender ".to_string()), 97 | folder: Some("default_folder".to_string()), 98 | from: None, 99 | inline_image_as_data: true 100 | } 101 | }, 102 | feeds: vec![], 103 | dirty: true, 104 | path: store_path 105 | }; 106 | let mut export_path = env::current_dir().unwrap(); 107 | export_path.push("tests"); 108 | export_path.push("unit"); 109 | export_path.push("store"); 110 | export_path.push("export.opml"); 111 | // Finally, add one 
feed to the store 112 | store.add_feed(Feed::from_vec(vec!["https://xkcd.com/rss.xml".to_string()])); 113 | // When 114 | store.export(Some(export_path.clone())); 115 | // Then 116 | assert_that!(export_path) 117 | .is_a_file(); 118 | // Read file content to a string 119 | let opml_content = fs::read_to_string(export_path).unwrap(); 120 | assert_that!(opml_content) 121 | .contains("https://xkcd.com/rss.xml") 122 | } -------------------------------------------------------------------------------- /tests/unit/store/bugfix_82_export_is_broken.json: -------------------------------------------------------------------------------- 1 | { 2 | "settings": { 3 | "email": { 4 | "server": "imap_server", 5 | "user": "username", 6 | "password": "password", 7 | "secure": { 8 | "Yes": 993 9 | }, 10 | "retry_max_count": 3, 11 | "retry_delay": 1 12 | }, 13 | "config": { 14 | "email": "Sender ", 15 | "folder": "default_folder", 16 | "inline_image_as_data": true 17 | } 18 | }, 19 | "feeds": [ 20 | { 21 | "url": "https://xkcd.com/rss.xml", 22 | "last_updated": "1970-01-01T00:00:00" 23 | } 24 | ] 25 | } -------------------------------------------------------------------------------- /tests/unit/store/simple_config_store.json: -------------------------------------------------------------------------------- 1 | { 2 | "settings": { 3 | "email": { 4 | "server": "imap_server", 5 | "user": "username", 6 | "password": "password", 7 | "secure": { 8 | "Yes": 993 9 | }, 10 | "retry_max_count": 3, 11 | "retry_delay": 1 12 | }, 13 | "config": { 14 | "email": "Sender ", 15 | "folder": "default_folder", 16 | "inline_image_as_data": true 17 | } 18 | }, 19 | "feeds": [ 20 | { 21 | "url": "https://xkcd.com/rss.xml", 22 | "config": { 23 | "folder": "RSS/others" 24 | }, 25 | "last_updated": "2023-06-05T04:00:00", 26 | "last_message": "https://xkcd.com/2785/" 27 | } 28 | ] 29 | } --------------------------------------------------------------------------------