├── .cargo
└── config.toml
├── .github
└── workflows
│ ├── on_pull_request_generate_coverage_report.yml
│ ├── on_push.yml
│ ├── on_push_apply_clippy.yml
│ ├── on_release_created.yml
│ └── on_tag.yml
├── .gitignore
├── .gitjournal.toml
├── .travis.yml.unused
├── Cargo.lock
├── Cargo.toml
├── README.md
├── samples
├── TheBrothersBrick
└── stackoverflow
├── src
├── config.rs
├── export.rs
├── feed.rs
├── feed_errors.rs
├── feed_reader.rs
├── feed_utils.rs
├── image_to_data.rs
├── import.rs
├── main.rs
├── message.css
├── message.rs
├── settings.rs
├── store.rs
└── syndication.rs
└── tests
└── unit
├── example.atom
├── example.rss
├── feed.rs
├── feed_utils
├── can_sanitize_email.rs
├── can_sanitize_message_authors.rs
└── can_trim_to_chars.rs
├── image_to_data.rs
├── store.rs
└── store
├── bugfix_82_export_is_broken.json
└── simple_config_store.json
/.cargo/config.toml:
--------------------------------------------------------------------------------
1 | [target.armv7-unknown-linux-gnueabihf]
2 | linker="arm-linux-gnueabihf-gcc"
3 | [target.armv7-unknown-linux-gnueabi]
4 | linker="arm-linux-gnueabi-gcc"
5 |
--------------------------------------------------------------------------------
/.github/workflows/on_pull_request_generate_coverage_report.yml:
--------------------------------------------------------------------------------
1 | on:
2 | pull_request:
3 | name: Run coverage report using tarpaulin and generate cobertura-like report
4 | jobs:
5 | test:
6 | name: coverage
7 | runs-on: ubuntu-latest
8 | container:
9 | image: xd009642/tarpaulin:develop-nightly
10 | options: --security-opt seccomp=unconfined
11 | steps:
12 | - name: Checkout repository
13 | uses: actions/checkout@v2
14 |
15 | - name: Generate code coverage
16 | run: |
17 | cargo +nightly tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml
18 | - name: Use coverage report
19 | uses: 5monkeys/cobertura-action@master
20 | with:
21 | path: cobertura.xml
22 | minimum_coverage: 10
23 |
--------------------------------------------------------------------------------
/.github/workflows/on_push.yml:
--------------------------------------------------------------------------------
1 | name: build Rust on push
2 |
3 | on:
4 | push:
5 | branches:
6 | - master
7 | pull_request:
8 | branches:
9 | - master
10 |
11 | jobs:
12 | Run_cargo_tests:
13 | name: Test rrss2imap
14 | runs-on: ubuntu-latest
15 | steps:
16 | - uses: actions/checkout@master
17 | # see https://github.com/marketplace/actions/rust-cargo
18 | # - uses: actions-rs/cargo@v1
19 | # with:
20 | # command: test
21 | # args: --all-features
22 |
23 | Standard_OS_build:
24 |
25 | name: Build ${{ matrix.config.name }}
26 | runs-on: ${{ matrix.config.os }}
27 | strategy:
28 | matrix:
29 | config:
30 | # See https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners
31 | - { os: ubuntu-latest, name: rrss2imap_linux, path: target/debug/rrss2imap}
32 | - { os: macOS-latest, name: rrss2imap_macOS, path: target/debug/rrss2imap}
33 | - { os: windows-latest, name: rrss2imap.exe, path: target/debug/rrss2imap.exe}
34 | # And this one is the dreaded Raspbian one ...
35 | - { os: ubuntu-latest, name: rrss2imap_raspbian, path: target/armv7-unknown-linux-gnueabihf/debug/rrss2imap, target: armv7-unknown-linux-gnueabihf, linker: gcc-arm-linux-gnueabihf}
36 | steps:
37 | - name: Install linker
38 | run: sudo apt-get update && sudo apt-get install ${{matrix.config.linker}}
39 | if: matrix.config.linker!=null
40 | - uses: actions-rs/toolchain@v1.0.6
41 | with:
42 | toolchain: stable
43 | target: ${{matrix.config.target}}
44 | override: true
45 | if: matrix.config.target!=null
46 | - uses: actions-rs/toolchain@v1.0.6
47 | with:
48 | toolchain: stable
49 | if: matrix.config.target==null
50 | - uses: actions/checkout@master
51 | # see https://github.com/marketplace/actions/rust-cargo
52 | - uses: actions-rs/cargo@v1.0.1
53 | with:
54 | command: build
55 | # temp
56 | args: --all-features
57 | if: matrix.config.target==null
58 | - uses: actions-rs/cargo@v1.0.1
59 | with:
60 | use-cross: true
61 | command: build
62 | args: --all-features --target ${{matrix.config.target}}
63 | if: matrix.config.target!=null
64 | - name: Upload build result for OS
65 | uses: actions/upload-artifact@v1
66 | with:
67 | name: ${{matrix.config.name}}
68 | path: ${{matrix.config.path}}
69 | needs: Run_cargo_tests
70 |
--------------------------------------------------------------------------------
/.github/workflows/on_push_apply_clippy.yml:
--------------------------------------------------------------------------------
1 | on:
2 | push:
3 | branches:
4 | - master
5 | name: Apply clippy and PR changes
6 | jobs:
7 | clippy_apply:
8 | runs-on: ubuntu-latest
9 | steps:
10 | - uses: actions/checkout@v2
11 | - uses: actions-rs/toolchain@v1
12 | with:
13 | toolchain: nightly
14 | components: clippy
15 | override: true
16 | - run: rustup component add clippy
17 | - run: cargo clippy --fix -Z unstable-options
18 | - name: Create Pull Request
19 | uses: peter-evans/create-pull-request@v3
20 | with:
21 | token: ${{ secrets.GITHUB_TOKEN }}
22 | branch-suffix: timestamp
23 | commit-message: "style(lint): automatically applied clippy lint"
24 | body: Automated changes from clippy
25 | title: "Automatic lint from clippy"
26 |
27 |
--------------------------------------------------------------------------------
/.github/workflows/on_release_created.yml:
--------------------------------------------------------------------------------
1 | name: Upload all artifacts to release
2 |
3 | on:
4 | release:
5 | types:
6 | - created
7 | - published
8 | jobs:
9 | Standard_OS_build:
10 |
11 | name: Build ${{ matrix.config.name }}
12 | runs-on: ${{ matrix.config.os }}
13 | strategy:
14 | matrix:
15 | config:
16 | # See https://help.github.com/en/actions/reference/virtual-environments-for-github-hosted-runners
17 | - { os: ubuntu-latest, name: rrss2imap_linux, path: target/release/rrss2imap}
18 | - { os: macOS-latest, name: rrss2imap_macOS, path: target/release/rrss2imap}
19 | - { os: windows-latest, name: rrss2imap.exe, path: target/release/rrss2imap.exe}
20 | # And this one is the dreaded Raspbian one ...
21 | - { os: ubuntu-latest, name: rrss2imap_raspbian, path: target/armv7-unknown-linux-gnueabihf/release/rrss2imap, target: armv7-unknown-linux-gnueabihf, linker: gcc-arm-linux-gnueabihf}
22 | steps:
23 | - name: Install linker
24 | run: sudo apt-get update && sudo apt-get install ${{matrix.config.linker}}
25 | if: matrix.config.linker!=null
26 | - uses: actions-rs/toolchain@v1.0.6
27 | with:
28 | toolchain: stable
29 | target: ${{matrix.config.target}}
30 | override: true
31 | if: matrix.config.target!=null
32 | - uses: actions-rs/toolchain@v1.0.6
33 | with:
34 | toolchain: stable
35 | if: matrix.config.target==null
36 | - uses: actions/checkout@master
37 | # see https://github.com/marketplace/actions/rust-cargo
38 | - uses: actions-rs/cargo@v1.0.1
39 | with:
40 | command: build
41 | args: --release --all-features
42 | if: matrix.config.target==null
43 | - uses: actions-rs/cargo@v1.0.1
44 | with:
45 | use-cross: true
46 | command: build
47 | args: --release --all-features --target ${{matrix.config.target}}
48 | if: matrix.config.target!=null
49 | - name: Upload matrix release asset
50 | uses: actions/upload-release-asset@v1.0.2
51 | env:
52 | GITHUB_TOKEN: ${{ secrets.RELEASE_SECRET }}
53 | with:
54 | upload_url: ${{ github.event.release.upload_url }}
55 | asset_name: ${{matrix.config.name}}
56 | asset_path: ${{matrix.config.path}}
57 | asset_content_type: application/octet-stream
58 |
--------------------------------------------------------------------------------
/.github/workflows/on_tag.yml:
--------------------------------------------------------------------------------
1 | name: Create release on tag
2 |
3 | on:
4 | push:
5 | tags:
6 | - '[0-9]+.[0-9]+.[0-9]+'
7 | - 'v[0-9]+.[0-9]+.[0-9]+'
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v3
14 | - name: Create a Release
15 | uses: softprops/action-gh-release@v1
16 | with:
17 | draft: true
18 | token: ${{ secrets.RELEASE_SECRET }}
19 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .project
2 | .vscode
3 | /target
4 | **/*.rs.bk
5 | config.json
6 | test-results/
7 | cobertura.xml
--------------------------------------------------------------------------------
/.gitjournal.toml:
--------------------------------------------------------------------------------
1 | categories = ["Added", "Changed", "Fixed", "Improved", "Removed"]
2 | category_delimiters = ["[", "]"]
3 | colored_output = true
4 | enable_debug = true
5 | excluded_commit_tags = []
6 | enable_footers = false
7 | show_commit_hash = false
8 | show_prefix = false
9 | sort_by = "date"
10 | template_prefix = ""
11 |
--------------------------------------------------------------------------------
/.travis.yml.unused:
--------------------------------------------------------------------------------
1 | services:
2 | - docker
3 | language: rust
4 | rust:
5 | - stable
6 | cache: cargo
7 |
8 | # All Rust build architectures are defined here
9 | matrix:
10 | include:
11 | - env: DEBUG=debug CROSS=cross TARGET=x86_64-unknown-linux-gnu
12 | os: linux
13 | - env: DEBUG=debug TARGET=i686-apple-darwin
14 | os: osx
15 | osx_image: xcode10
16 | - env: DEBUG=debug TARGET=x86_64-apple-darwin
17 | os: osx
18 | osx_image: xcode10
19 | - env: TARGET=x86_64-pc-windows-msvc
20 | os: windows
21 | - env: DEBUG=debug CROSS=cross TARGET=armv7-unknown-linux-gnueabihf
22 | os: linux
23 | addons:
24 | apt:
25 | packages:
26 | - gcc-arm-linux-gnueabihf
27 |
28 | # part shamelessly borrowed from https://github.com/Enet4/nifti-rs/blob/438538bfffa2347ece5a09c2a37c0c407ec6fbee/.travis.yml
29 | before_script:
30 | - export PATH="$PATH:$HOME/.cargo/bin"
31 | - rustup target add $TARGET || true
32 | - if [ ! -z "$CROSS" ]; then
33 | cargo install cross --force;
34 | export CARGO_CMD="cross";
35 | else
36 | export CARGO_CMD=cargo;
37 | fi
38 |
39 | # This is the script that will be run on each matrix element
40 | script:
41 | - |
42 | if [ $TARGET = "x86_64-unknown-linux-gnu" ]; then
43 | echo "Running on $TARGET, so running tests!"
44 | cargo test
45 | fi
46 | - if [ ! -z "$DEBUG" ]; then
47 | $CARGO_CMD build --target $TARGET --verbose
48 | fi
49 | - $CARGO_CMD build --target $TARGET --verbose --release
50 | - mkdir -p target/executable
51 | - ls -la target/${TARGET}
52 | - cp target/${TARGET}/debug/rrss2imap target/executable/rrss2imap-${TARGET}-debug
53 | - cp target/${TARGET}/release/rrss2imap target/executable/rrss2imap-${TARGET}
54 | - ls -la target/executable
55 |
56 | # Once the Rust packages are built, here they are deployed
57 | deploy:
58 | provider: releases
59 | api_key: ${GITHUB_OAUTH}
60 | file_glob: true
61 | file: target/executable/*
62 | skip_cleanup: true
63 | overwrite: true
64 | # This way, the release is not directly visible
65 | draft: true
66 | verbose: true
67 | # Release name on body
68 | name: "$TRAVIS_TAG"
69 | # Body is created by git journal !
70 | # body: "$JOURNAL"
71 | on:
72 | repo: Riduidel/rrss2imap
73 | tags: true
74 |
75 | branches:
76 | except:
77 | - "/^untagged/"
78 |
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "rrss2imap"
3 | version = "0.5.2"
4 | authors = ["Nicolas Delsaux "]
5 | description = "A simple script that exposes RSS entries as mail messages, pushed directly using IMAP"
6 | homepage = "https://github.com/Riduidel/rrss2imap"
7 | repository = "https://github.com/Riduidel/rrss2imap"
8 | readme = "README.md"
9 | keywords = ["RSS", "Atom", "IMAP", "command-line", "script"]
10 | categories = ["command-line-utilities", "email"]
11 | license = "GPL-3.0-or-later"
12 | edition = "2018"
13 | # This is only valid for the archive available in crates.io, not for the generated executable
14 | include = [ "templates/*", "src/**/*", "Cargo.toml" ]
15 | # This allows renaming of tag name to be consistent with already long history of rrss2imap versions
16 |
17 | [profile.release]
18 | opt-level = "z" # Optimize for size.
19 | lto = true
20 |
21 | [badges]
22 | travis-ci = { repository = "Riduidel/rrss2imap", branch = "master" }
23 | is-it-maintained-issue-resolution = { repository = "riduidel/rrss2imap" }
24 | is-it-maintained-open-issues = { repository = "riduidel/rrss2imap" }
25 | maintenance = { status = "actively-developed" }
26 |
27 | [dependencies]
28 | # logging interface for Rust
29 | log = "0.4"
30 | # chosen logger implementation allowing easy configuration
31 | flexi_logger = "0.25"
32 | # Used for parsing command line args
33 | structopt = "0.3"
34 | # Used for reading/writing config file
35 | serde = "1.0"
36 | # macro implementation for serde easy usage
37 | serde_derive = "1.0"
38 | # allow reading and writing to json
39 | serde_json = "1.0"
40 | # Used for import/export TODO replace by quick-xml
41 | treexml = "0.7"
42 | # Used to get feed entries (and images, when it will be possible)
43 | atom_syndication = "0.12"
44 | rss = "2.0"
45 | # time handling
46 | chrono = { version = "0.4", features = ["serde"] }
47 | # Fixing poorly formatted dates !
48 | rfc822_sanitizer = "0.3"
49 | # And an imap connector, obviously
50 | imap = "2.3"
51 | native-tls = "0.2"
52 | # Allows to easily start tera
53 | lazy_static = "1.4"
54 | lol_html = "1.0"
55 | base64 = "0.21"
56 | # A lightweight http client (with no default support for async/await)
57 | ureq = {version = "2.6", features = ["native-tls", "native-certs"]}
58 | xhtmlchardet = "2.1"
59 | human-panic = "1.0"
60 | url = "2.1"
61 | tree_magic_mini = "3.0"
62 | mail-builder = "0.3"
63 | unidecode = "0.3"
64 | regex = "1.5"
65 | custom_error = "1.8"
66 | directories = "5.0"
67 | tests_bin = "1.0"
68 | rayon = "1.7"
69 |
70 | [dev-dependencies]
71 | assert_cli = "0.6"
72 | spectral = "0.6"
73 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # rrss2imap
2 |
3 | [](https://sagiegurari.github.io/cargo-make)
4 | [](https://travis-ci.org/Riduidel/rrss2imap)
5 |
6 | rrss2imap is a Rust reimplementation of the classical Python script [rss2imap](https://github.com/rcarmo/rss2imap)
7 |
8 | Goals of this project include
9 |
10 | * ✅ Having a reasonably performant implementation of rss2imap (by performant I mean able to run without problem on my Raspberry)
11 | * ✅ Learn Rust
12 | * ✅ Explore parallel mechanism (thanks Rayon)
13 | * ✅ Maybe provide some kind of image embedding (DONE)
14 |
15 | ## Getting Started
16 |
17 | ### Download rrss2imap
18 |
19 | rrss2imap can be downloaded from [**releases page**](https://github.com/Riduidel/rrss2imap/releases).
20 | If there is no release for your platform, you can file an issue ... or if you know Travis, you can even add your platform to `.travis.yml`.
21 |
22 | ### As a user
23 |
24 |
25 |
26 | Application transforming rss feeds into email by directly pushing the entries into IMAP folders.
27 | This application is an adaptation of the rss2imap Python script to Rust.
28 |
29 | #### How to use ?
30 |
31 | The simplest way to understand what to do is just to run `rrss2imap --help`
32 |
33 | It should output something like
34 |
35 | FLAGS:
36 | -h, --help Prints help information
37 | -V, --version Prints version information
38 |
39 | SUBCOMMANDS:
40 | add Adds a new feed given its url
41 | delete Delete the given feed
42 | email Changes email address used in feed file to be the given one
43 | export Export subscriptions as opml file
44 | help Prints this message or the help of the given subcommand(s)
45 | import import the given opml file into subscriptions
46 | list List all feeds configured
47 | new Creates a new feedfile with the given email address
48 | reset Reset feedfile (in other words, remove everything)
49 | run Run feed parsing and transformation
50 |
51 | Which give you a glimpse of what will happen
52 |
53 | Each of these commands also provide some help, when run with the same `--help` flag.
54 |
55 | The important operations to memorize are obviously
56 |
57 | #### `rrss2imap new`
58 |
59 | Creates a new `config.json` file. At init time, the config file will only contain a `settings` element
60 | with the email address set. You **have** to set
61 |
62 | * the used imap server
63 |   * with user login and password
64 |   * and security settings (secure should contain `{"Yes": secure port}` for imap/s
65 | or `{"No": unsecure port}` for simple imap)
66 | * the default config
67 |   * folder will be the full path to an imap folder where entries will fall in
68 |   * email will be the recipient email address (which may not be yours for easier filtering)
69 |   * Base64 image inlining
70 | * feeds is the list of all rss feeds that can be added
71 |
72 | #### `rrss2imap add`
73 |
74 | This command will add a new feed to your config. You can directly set here the email recipient as well as the folder
75 | (but not the base64 image inlining parameter)
76 |
77 | #### `rrss2imap run`
78 |
79 | This is the main command. It will
80 |
81 | 1. get all rss/atom feed contents
82 | 2. List all new entries in these feeds
83 | 3. Transform these entries into valid email messages
84 | 4. Push these mail messages directly on IMAP server
85 |
86 | #### `rrss2imap list`
87 |
88 | Displays a list of the rss feeds. Here is an example
89 |
90 | ```
91 | 0 : http://tontof.net/?rss (to: Nicolas Delsaux (default)) RSS/rrss2imap (default)
92 | 1 : https://www.brothers-brick.com/feed/ (to: Nicolas Delsaux (default)) RSS/rrss2imap (default)
93 | 2 : https://nicolas-delsaux.hd.free.fr/rss-bridge/?action=display&bridge=LesJoiesDuCode&format=AtomFormat (to: Nicolas Delsaux (default)) RSS/rrss2imap (default)
94 | ```
95 |
96 | Please notice that each entry has an associated number, which is the one to enter when running `rrss2imap delete <number>`
97 |
98 | #### `config.json` format
99 |
100 | A typical feedfile will look like this
101 |
102 | ```json
103 | {
104 | "settings": {
105 | "email": {
106 | "server": "the imap server of your mail provider",
107 | "user": "your imap user name",
108 | "password": "your imap user password",
109 | "secure": {
110 | "Yes": 993 // Set to "Yes": port for imaps or "No": port for unsecure imap
111 | }
112 | },
113 | // This config is to be used for all feeds
114 | "config": {
115 | // This is the email address written in each mail sent. It can be different from the email user
116 | "email": "Nicolas Delsaux ",
117 | // This is the imap folder in which mails will be written
118 | "folder": "RSS/rrss2imap",
119 | // Setting this to true will force rrss2imap to transform all images into
120 | // base64. This prevents images from being downloaded (and is really cool when reading feeds from a smartphone)
121 | // but largely increases each mail size (which can be quite bothersome)
122 | "inline_image_as_data": true
123 | }
124 | },
125 | "feeds": [
126 | {
127 | "url": "http://tontof.net/?rss",
128 | // This last updated is updated for each entry and should be enough to have rss items correctly read
129 | "last_updated": "2019-05-04T16:53:15",
130 | "config": {
131 | // each config element can be overwritten at the feed level
132 | }
133 | },
134 | ```
135 |
136 |
137 |
138 |
139 | ### As a developer
140 | * clone this repository
141 | * run `cargo run`
142 |
143 | #### Prerequisites
144 |
145 | You need a complete rust build chain
146 |
147 | To perform a release, you'll also need
148 |
149 | * [cargo release](https://github.com/sunng87/cargo-release)
150 | * [git journal](https://github.com/saschagrunert/git-journal)
151 |
152 | ##### Releasing
153 |
154 | 1. Install cargo release (`cargo install cargo-release`) and git-journal (`cargo install git-journal`)
155 | 1. Run `cargo release`. This will build a version of the code, push it onto crates.io and tag the repository.
156 | Thanks to GitHub Actions (and more specifically the `on_tag.yml` one), once the tag is pushed to GitHub, a release is created.
157 | 1. Publish the release. This will trigger the `on_release_created.yml` which will build executables for the target platforms and attach them to the release.
158 |
159 | And release is done! It was easy, no?
160 |
161 | #### Installing
162 |
163 | 1. Download the latest version from [Github releases page](https://github.com/Riduidel/rrss2imap/releases)
164 | 1. Run `rrss2imap new` which will create the `config.json`
165 | 1. Fill the missing parts (typically include email configuration)
166 | 1. Run with `rrss2imap run`
167 |
168 | ### Running the tests
169 |
170 | Automated tests can be run with `cargo test`.
171 | Coverage is done thanks to [tarpaulin](https://github.com/xd009642/tarpaulin).
172 | Coverage is also computed during pull request runs
173 |
174 | ## Built With
175 |
176 | Take a look at Cargo dependencies
177 |
178 | ## Contributing
179 |
180 | Please read [CONTRIBUTING.md](https://gist.github.com/PurpleBooth/b24679402957c63ec426) for details on our code of conduct, and the process for submitting pull requests to us.
181 |
182 | ## Versioning
183 |
184 | We use [SemVer](http://semver.org/) for versioning. For the versions available, see the [tags on this repository](https://github.com/Riduidel/rrss2imap/tags).
185 |
186 | ## Authors
187 |
188 | * **Nicolas Delsaux** - *Initial work* - [Riduidel](https://github.com/Riduidel)
189 |
190 | See also the list of [contributors](https://github.com/Riduidel/rrss2imap/contributors) who participated in this project.
191 |
192 | ## License
193 |
194 | This project is licensed under the GPL-3.0-or-later License (as declared in `Cargo.toml`) - see the [LICENSE.md](LICENSE.md) file for details
195 |
196 | ## Acknowledgments
197 |
198 | * [Rui Carmo](https://github.com/rcarmo) for Python implementation of [rss2imap](https://github.com/rcarmo/rss2imap)
199 | * [Aaron Swartz](https://en.wikipedia.org/wiki/Aaron_Swartz) for [RSS](https://en.wikipedia.org/wiki/RSS) (and [rss2email](https://github.com/rss2email/rss2email))
200 |
201 |
--------------------------------------------------------------------------------
/samples/TheBrothersBrick:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | The Brothers Brick
6 |
7 | https://www.brothers-brick.com
8 | World's No. 1 source for LEGO news, reviews, and fan creations.
9 | Fri, 08 Feb 2019 14:00:19 +0000
10 | en-US
11 | hourly
12 | 1
13 | https://wordpress.org/?v=5.0.3
14 | 40578819Subscribe with My Yahoo!Subscribe with NewsGatorSubscribe with My AOLSubscribe with BloglinesSubscribe with NetvibesSubscribe with GoogleSubscribe with Pageflakes
15 | Cadet Thrawn outwits his opponents in the metallurgy lab
16 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/eWF3-ZnktaM/
17 | https://www.brothers-brick.com/2019/02/08/cadet-thrawn-outwits-his-opponents-in-the-metallurgy-lab/#respond
18 | Fri, 08 Feb 2019 14:00:19 +0000
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 | https://www.brothers-brick.com/?p=171427
28 |
29 | While many stories and characters passed into so-called “Legends” status when Disney acquired Lucasfilm, some fan-favorites have been incorporated into the new canon, and I can’t think of any character more deserving than Mitth’raw’nuruodo — or Thrawn, as he is more commonly known in Galactic Basic. The Chiss Grand Admiral had a more humble introduction to Imperial life as a cadet at the military academy on Coruscant in the first book of Timothy Zahn‘s new trilogy. In this jam-packed scene by CRCT Productions built as a RebelLUG collaboration, there are so many great details, not the least of which is the light gray angled walkway bordered by the two-color spring-loaded shooter brick.
30 |
31 |
32 |
Every piece of machinery, from the fabrication unit on the back wall, to the robot arm on the left, looks fully functional, with specific purposes. The lighting is also a very nice touch, giving the scene the stark sterile feel that any Imperial facility deserves. I don’t know if those tools on the back counter have been properly stowed, though I think the approaching officer has other things on his mind, like why these cadets are engaged in gambling activities, which are against regulations.
33 |
34 |
The fabrication unit is worth a closer look, with a bank of computers and a robot arm to create — well, whatever is supposed to get created in a Star Wars metallurgy lab.
35 |
36 | ]]>
37 | https://www.brothers-brick.com/2019/02/08/cadet-thrawn-outwits-his-opponents-in-the-metallurgy-lab/feed/
38 | 0
39 | 171427https://www.brothers-brick.com/2019/02/08/cadet-thrawn-outwits-his-opponents-in-the-metallurgy-lab/
40 |
41 | The LEGO Movie 2’s sewer babies just got bigger
42 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/EcPUFrbS1xA/
43 | https://www.brothers-brick.com/2019/02/08/the-lego-movie-2s-sewer-babies-just-got-bigger/#respond
44 | Fri, 08 Feb 2019 08:00:13 +0000
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 | https://www.brothers-brick.com/?p=171454
55 |
56 | The LEGO Movie 2: The Second Part officially opens today (be sure to read our LEGO Movie 2 review), and to mark the occasion, “Big Daddy” Nelson has taken a few of the movie’s smallest characters and given them a huge makeover. Built in the style of the classic 3723 Creator Minifigure set, these giant sewer babies look just like their miniature counterparts from the TLM2 Accessory Set.
57 |
58 |
They have even more range than the toddler elements they’re based on, featuring double-sided heads and movable hands. They’re also more complex than you might think, with some clever mosaic work needed to translate the prints on the torsos and heads into bricks.
59 |
60 | ]]>
61 | https://www.brothers-brick.com/2019/02/08/the-lego-movie-2s-sewer-babies-just-got-bigger/feed/
62 | 0
63 | 171454https://www.brothers-brick.com/2019/02/08/the-lego-movie-2s-sewer-babies-just-got-bigger/
64 |
65 | Color and light blend beautifully in this Medieval city
66 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/SjjzU5g6xHY/
67 | https://www.brothers-brick.com/2019/02/07/color-and-light-blend-beautifully-in-this-medieval-city/#comments
68 | Fri, 08 Feb 2019 02:00:15 +0000
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 | https://www.brothers-brick.com/?p=171016
78 |
79 | Fantasy castle building often leans towards the creation of dark, gloomy and foreboding places in which one would not lightly tread. Master castle builder Jonas Wide usually takes a different route, however, using cheerful splashes of color to create incredibly warm and welcoming scenes. This style is definitely evident in his latest creation, the Houses of Barqa:
80 |
81 |
The buildings are elegantly designed and laid out, but the real star of this show is the use of color. If there’s been a better use of sand red, I haven’t seen it. The pastel palette blends so well with the more subtle tan/dark tan foundations and street. Taken together, it’s a gorgeous and eye-catching scene. Clever use of lighting also makes for some atmospheric and quite realistic looking images.
82 |
83 |
If Jonas’ city leaves you wishing for more, definitely check out his amazing Streets of Barqa from several years ago or last year’s Aslanic Temple in Barqa.
84 | ]]>
85 | https://www.brothers-brick.com/2019/02/07/color-and-light-blend-beautifully-in-this-medieval-city/feed/
86 | 1
87 | 171016https://www.brothers-brick.com/2019/02/07/color-and-light-blend-beautifully-in-this-medieval-city/
88 |
89 | Stylish sci-fi racer approaches the starting line
90 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/cMVGbcZtImI/
91 | https://www.brothers-brick.com/2019/02/07/stylish-sci-fi-racer-approaches-the-starting-line/#respond
92 | Thu, 07 Feb 2019 20:00:16 +0000
93 |
94 |
95 |
96 |
97 |
98 |
99 |
100 | https://www.brothers-brick.com/?p=171371
101 |
102 | When it comes to building a great LEGO model, one thing that really shows off a builder’s skill is the ability to create something that can easily be mistaken for something other than plastic interlocking bricks. This sci-fi racing car by Vince_Toulouse is a perfect blend of smoothly curved details and unique parts, like the troll arms used for the main engine exhaust ports, or the mermaid tails housing the headlights. But by far, my favorite feature is the two-color striping throughout the car, which provides the perfect polish.
103 |
104 | ]]>
105 | https://www.brothers-brick.com/2019/02/07/stylish-sci-fi-racer-approaches-the-starting-line/feed/
106 | 0
107 | 171371https://www.brothers-brick.com/2019/02/07/stylish-sci-fi-racer-approaches-the-starting-line/
108 |
109 | Neo-classic space drill inspection
110 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/H8i3v-M_MvA/
111 | https://www.brothers-brick.com/2019/02/07/neo-classic-space-drill-inspection/#comments
112 | Thu, 07 Feb 2019 14:00:51 +0000
113 |
114 |
115 |
116 |
117 |
118 |
119 |
120 |
121 | https://www.brothers-brick.com/?p=171357
122 |
123 | Whilst the spacecraft of the classic LEGO space theme seem to grab the nostalgic limelight, for some of us the lunar rovers were the real stars. Maybe Andreas Lenander is trying to make this point, and if he is what better way than through this magnificent Neo-Classic Space Drilling Rover. It’s certainly got my classic space pulse racing. Although it sticks faithfully to the grey and blue colour scheme, its forms and shape speak to a more realistic post-NASA near future. There’s phenomenal part usage too, just look at the way the old rails form the drill casing, and the Jurassic Worldgyrosphere looks as if it were designed to be a moon buggy cab. To complete the scene Andrea signs off with a troop of new pink astronauts, from Benny’s Space Squad, scouring the variegated planet surface for its precious mineral reserves.
124 |
125 | ]]>
126 | https://www.brothers-brick.com/2019/02/07/neo-classic-space-drill-inspection/feed/
127 | 1
128 | 171357https://www.brothers-brick.com/2019/02/07/neo-classic-space-drill-inspection/
129 |
130 | Gazooks! Here come Cahdok and Gahdok!
131 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/4K_F0AZMWkc/
132 | https://www.brothers-brick.com/2019/02/07/gazooks-here-come-cahdok-and-gahdok/#respond
133 | Thu, 07 Feb 2019 08:00:46 +0000
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 | https://www.brothers-brick.com/?p=171370
143 |
144 | Years after being discontiniued, Bionicle remains a strong and very much autonomous theme in LEGO fan builds. Unique pieces and almost complete freedom of angles set it apart from most other styles, but was it always so? Jayfa and Andrew Steele bring us back to 2002, a time when Bionicle was still searching for an identity and was for the most part a sub-theme to Technic. The glorious titan set Cahdok and Gahdok was a load of gears, rubber bands, liftarms and most importantly, play features. I do not think this re-imagining has much of those, but it does capture the spirit of the Bohrok queens.
145 |
146 |
147 |
Jayfa’s and Andrew’s Cahdok and Gahdok are very true to their source, but what I find the most impressive is how consistent the two builders made them. There is a bit of unique style in both of them while still giving the appearance like they came from the same universe. The flowing curves are a stark contrast to the official set, and yet they look incredibly natural. The purple and turquoise hoses under the neck and tail are reminiscent of the Competition theme which has a lot of parallels with early Bionicle play functions and this set in particular.
148 |
149 |
Gahdok has an interesting little detail that I am particularly in love with (besides all sorts of little mechanical bits on the legs) – the hips have red Bohrok shields taken directly from the official set, possibly as a nod to the original design.
150 |
151 |
It is interesting how Jayfa changed the colour scheme of Cahdok. It looks interesting and is probably beter overall, but one could understand that the original set was made with recombination in mind.
152 |
153 | ]]>
154 | https://www.brothers-brick.com/2019/02/07/gazooks-here-come-cahdok-and-gahdok/feed/
155 | 0
156 | 171370https://www.brothers-brick.com/2019/02/07/gazooks-here-come-cahdok-and-gahdok/
157 |
158 | The tree house of your LEGO dreams
159 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/PDtQw6oJwao/
160 | https://www.brothers-brick.com/2019/02/06/the-tree-house-of-your-lego-dreams/#respond
161 | Thu, 07 Feb 2019 02:00:53 +0000
162 |
163 |
164 |
165 |
166 |
167 |
168 |
169 | https://www.brothers-brick.com/?p=171328
170 |
171 | Once gain I have the pleasure of highlighting Alanboar Cheung‘s amazing work for TBB, previously sharing his butterfly mimicry and cloud car models. Never predictable, his newest build, a quirky dream treehouse, is inspired by The LEGO Movie 2.
172 |
173 |
Built for the movie’s unique cast of characters, it incorporates a rainbow, clouds, piano room, and even a Unikitty slide — although I’m little worried as to where you’d end up if you actually tried to ride it. Simply exploding with colourful charm and cute details, it’s one of those creations that is going to be just as much fun to play with as is to marvel at. It’s also another reason – as if I needed one – to get excited about seeing the film, which comes out later this week.
174 | ]]>
175 | https://www.brothers-brick.com/2019/02/06/the-tree-house-of-your-lego-dreams/feed/
176 | 0
177 | 171328https://www.brothers-brick.com/2019/02/06/the-tree-house-of-your-lego-dreams/
178 |
179 | You know my methods Watson
180 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/jQqg41VX6KM/
181 | https://www.brothers-brick.com/2019/02/06/you-know-my-methods-watson/#comments
182 | Wed, 06 Feb 2019 20:00:44 +0000
183 |
184 |
185 |
186 |
187 |
188 |
189 |
190 | https://www.brothers-brick.com/?p=171365
191 |
192 | When the body of Sir Charles Murgatroyd is discovered in his library, the local Constabulary are immediately called for. Foul play is suspected, and an investigation begins. Despite their best efforts, the police remain baffled as to motive or culprit. Only one hope remains, to summon the consulting detective Sherlock Holmes and his friend Dr John Watson…
193 |
194 |
Since reading The Hound of the Baskervilles as a child, I’ve always been a huge fan of Arthur Conan Doyle’s Sherlock Holmes stories. I’ve often pondered the idea of building scenes from some of his most famous adventures, and this little model was something of a trial. The library-based murder depicted is not based on any particular story, but I’m quite pleased with how it turned out. The trickiest bit of the whole model was the window — it took my ages to get the curtain to look right, and to get the leaded windows to fill the space without gaps.
195 | ]]>
196 | https://www.brothers-brick.com/2019/02/06/you-know-my-methods-watson/feed/
197 | 3
198 | 171365https://www.brothers-brick.com/2019/02/06/you-know-my-methods-watson/
199 |
200 | The LEGO Movie 2 Collectible Minifigures 71023 Feel Guide [Review]
201 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/MqChhN0BHEA/
202 | https://www.brothers-brick.com/2019/02/06/the-lego-movie-2-collectible-minifigures-71023-feel-guide-review/#comments
203 | Wed, 06 Feb 2019 14:00:32 +0000
204 |
205 |
206 |
207 |
208 |
209 |
210 |
211 |
212 |
213 | https://www.brothers-brick.com/?p=171407
214 |
215 | The latest series of LEGO’s Collectible Minifigures theme based The LEGO Movie 2: The SecondPart are now hitting stores. We’ve already brought you our full, in-depth review, so that means it’s now time for our Feel Guide to help you poke and prod your way to a full set of 20 characters. 71023 LEGO Minifigures – The LEGO Movie 2: The Second Part are available now in retail stores and online, for US $3.99 | CAN $4.99.
216 |
217 |
So let’s check out what makes these figures stand out from one another when all you’ve got is an opaque package and a crowd of onlookers in the store aisle.
218 |
219 |
The case
220 |
Like the majority of previous series, this set comes packed in cases of 60. Unlike previous series, they seem to be pretty well sorted within the case, but it’s not perfect. The cases are divided into three rows of 20 packs each. Many collectors have been reporting success with grabbing one full row and getting the full set of 20, or very close to it. We paid extra attention to this when sorting our case, and ours didn’t break down quite that neatly (nor did it break into precisely three full sets, having one figure mismatch). However, even with our case, grabbing a row would have netted you about 17 unique figures, so if you’re short on time or dexterity, this will be your best bet. One big caveat, of course: this only applies to new cases that haven’t already been rifled through by others.
221 |
222 |
This series also differs from previous ones in another way. Several of the characters are packaged with an inner plastic bag. This bag was found in six characters in our review, and the bag was present in all of the packs for that given character. However, we’ve talked with other collectors who found inner bags with other characters, or found these without bags. Ultimately, it seems a bit random (perhaps LEGO has more than factory or production line making this series). So although the inner bags crinkle loudly and might have served as a good indicator on which fig you’re handling, we can’t recommend this method as it doesn’t appear reliable enough. Thankfully, the presence of an inner bag doesn’t have much of an effect on the ability to feel the elements inside.
Download a PDF of this cheat sheet to use on your phone in the store when searching for minifigures. As always, we’ve developed this guide by experience, having started way back with Series 1. And of course, we’ve already sorted lots of The LEGO Movie 2 Collectible Minifigures by feel.
71023 The LEGO Movie 2 Collectible Minifigures are available now from the LEGO Shop Online (US $3.99 | CAN $4.99) and Amazon, as well as third-party sellers on Bricklink and eBay.
236 |
The LEGO Group sent The Brothers Brick a copy of this set for review. Providing TBB with products for review guarantees neither coverage nor positive reviews.
237 | ]]>
238 | https://www.brothers-brick.com/2019/02/06/the-lego-movie-2-collectible-minifigures-71023-feel-guide-review/feed/
239 | 6
240 | 171407https://www.brothers-brick.com/2019/02/06/the-lego-movie-2-collectible-minifigures-71023-feel-guide-review/
241 |
242 | Tropical paradise is a plea for warmer weather
243 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/ddNmm5bsQyI/
244 | https://www.brothers-brick.com/2019/02/06/tropical-paradise-is-a-plea-for-warmer-weather/#comments
245 | Wed, 06 Feb 2019 08:00:39 +0000
246 |
247 |
248 |
249 |
250 |
251 |
252 |
253 |
254 | https://www.brothers-brick.com/?p=171333
255 |
256 | Those of us in the northwestern hemisphere have had a tough time lately, what with the polar vortex, record-shattering temperatures (as low as -63 degrees Celsius at my mom’s house in Winnipeg, Canada) and unrelenting snow and ice. Even here in southwestern Arkansas, where winter generally just means anything below 10 degrees Celsius, we were racing to buy wintry garments normally only seen in movies about Alaska. On the flip side, the nasty weather meant more time shamelessly spent in the LEGO room. I built this tropical scene while daydreaming about places where I don’t have to leave faucets running for fear of water pipes bursting inside my home.
257 |
258 |
This was a simple but fun build to throw together. There are no crazy techniques or excessively nice parts usages (NPU) to highlight here. But a dash of color, proper composition and a bit of photography know-how can just about always turn a bland build into something that really catches the eye. If you like the trees, they are easily recreated using the 4mm pneumatic hose and cylinder bricks. They can be twisted around each other and held in that position with the leaf elements. Simple and easy jungle tree!
259 | ]]>
260 | https://www.brothers-brick.com/2019/02/06/tropical-paradise-is-a-plea-for-warmer-weather/feed/
261 | 1
262 | 171333https://www.brothers-brick.com/2019/02/06/tropical-paradise-is-a-plea-for-warmer-weather/
263 |
264 | Long have I served as the guardian spirit
265 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/Kr0PmocCLZk/
266 | https://www.brothers-brick.com/2019/02/05/long-have-i-served-as-the-guardian-spirit/#respond
267 | Wed, 06 Feb 2019 02:00:04 +0000
268 |
269 |
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 | https://www.brothers-brick.com/?p=170980
278 |
279 | Guardian of the Hyrule Forest. Giver of Quests. Insides infested with Skulltula Spiders.
280 | The Great Deku Tree from Nintendo classic Zelda: The Ocarina of Time is given the LEGO treatment by Julius von Brunk. The microscale model is nicely-done, perfectly capturing the tree’s sleepy-looking face. But it’s the amazing photography which sets this creation apart — Julius has combined three images into one to create this stunning look, which manages to make a small model appear much larger. I love how the low angle and out-of-focus foreground foliage gives the tree such physical presence. Excellent stuff.
281 |
282 | ]]>
283 | https://www.brothers-brick.com/2019/02/05/long-have-i-served-as-the-guardian-spirit/feed/
284 | 0
285 | 170980https://www.brothers-brick.com/2019/02/05/long-have-i-served-as-the-guardian-spirit/
286 |
287 | LEGO Millennium Falcon hides in plain sight
288 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/9myV6FZP3fw/
289 | https://www.brothers-brick.com/2019/02/05/lego-millennium-falcon-hides-in-plain-sight/#respond
290 | Tue, 05 Feb 2019 20:00:40 +0000
291 |
292 |
293 |
294 |
295 |
296 |
297 |
298 | https://www.brothers-brick.com/?p=170984
299 |
300 | It’s one of the coolest moments in The Empire Strikes Back, when Han Solo evades the Imperials by hiding his ship in plain sight, latched on to the hull of a Star Destroyer. Here this memorable scene is recreated in LEGO bricks by Didier Burtin. The model is immediately recognisable — indeed, at first glance it’s practically indistinguishable from a still from the movie. The Star Destroyer’s surface is impressively detailed, packed with a generous level of detail that breaks up all that grey, and the lighting for the photo is spot-on, managing to capture the stark contrast and drama of the original scene.
301 |
302 | ]]>
303 | https://www.brothers-brick.com/2019/02/05/lego-millennium-falcon-hides-in-plain-sight/feed/
304 | 0
305 | 170984https://www.brothers-brick.com/2019/02/05/lego-millennium-falcon-hides-in-plain-sight/
306 |
307 | The city of Cyrene falls to the Pierian Empire
308 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/HgSjnqROoxQ/
309 | https://www.brothers-brick.com/2019/02/05/the-city-of-cyrene-falls-to-the-pierian-empire/#respond
310 | Tue, 05 Feb 2019 14:00:18 +0000
311 |
312 |
313 |
314 |
315 |
316 |
317 |
318 |
319 |
320 | https://www.brothers-brick.com/?p=171315
321 |
322 | Despite the inclusion of Classical or Greco-Roman characters in several waves of Collectible Minifigures, the ancient world just isn’t as popular with LEGO Castle builders as the big gray castles of the medieval era. As a result, it’s always refreshing to see great LEGO models from that earlier era. Talented TBB alum Mark Erickson has created a fictional battle between rivals the Pierian Empire and the great city of Tylis. Mark’s diorama is full of fantastic architectural detail — I particularly love the contrast between the tan city walls and the shining white temple with its gold details and green roof.
323 |
324 |
325 |
While your eye is certainly drawn past the walls to the beautiful temple, the walls themselves are worth a closer look, with great brick-work in varied, natural colors between what I’m guessing are sandstone columns. This view also shows off the siege tower Mark designed for his Pierian soldiers to surmount the Cyrenian walls.
326 |
327 |
Far too many builders neglect minifigures once they’ve completed their cityscape or spaceship or castle, but minifigures bring a LEGO model to life, and Mark’s scene has minifigure action galore. His minifigures sport custom armor and weapons from BrickWarriors, helping to distinguish the blue-and-silver and red-and-gold factions.
328 |
329 |
Mark says that he missed entering the annual Colossal Castle Contest this past December because he was busy working on this diorama. With off-angle walls throughout, excellent landscaping, and an engaging story told through minifig action, there’s lots to love in this excellent LEGO creation, and very much worth the wait.
330 |
331 | ]]>
332 | https://www.brothers-brick.com/2019/02/05/the-city-of-cyrene-falls-to-the-pierian-empire/feed/
333 | 0
334 | 171315https://www.brothers-brick.com/2019/02/05/the-city-of-cyrene-falls-to-the-pierian-empire/
335 |
336 | 1% inspiration, 99% perspiration
337 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/XqvuZoc8ONA/
338 | https://www.brothers-brick.com/2019/02/05/1-inspiration-99-perspiration/#comments
339 | Tue, 05 Feb 2019 08:00:43 +0000
340 |
341 |
342 |
343 |
344 |
345 |
346 |
347 | https://www.brothers-brick.com/?p=170989
348 |
349 | That was Thomas Edison’s recipe for innovation. But he failed to mention the importance of keeping things simple. When it comes to LEGO creations, sometimes the simplest models are the most impressive, and this wonderful LEGO lightbulb by Josephine Monterosso is a great example. It may be comprised of only seven pieces, but this economy of parts only makes it all the more impressive. The transparent minifigure head and clear space helmet make for the perfect recreation of retro lightbulb curves, and the short length of silver ribbed hose is a nice way to evoke a screw thread. Maybe this LEGO lightbulb will give other builders ideas too!
350 |
351 | ]]>
352 | https://www.brothers-brick.com/2019/02/05/1-inspiration-99-perspiration/feed/
353 | 3
354 | 170989https://www.brothers-brick.com/2019/02/05/1-inspiration-99-perspiration/
355 |
356 | Block-rocking beats from this LEGO Walkman
357 | http://feedproxy.google.com/~r/TheBrothersBrick/~3/lzIx-2Y0fmk/
358 | https://www.brothers-brick.com/2019/02/04/block-rocking-beats-from-this-lego-walkman/#respond
359 | Tue, 05 Feb 2019 02:00:47 +0000
360 |
361 |
362 |
363 |
364 |
365 |
366 |
367 |
368 | https://www.brothers-brick.com/?p=170977
369 |
370 | When Ralf Langer put together his excellent LEGO headphones and tape cassette, all that was missing was something to provide the tunes. Now he’s filled the gap with a brick rendition of the innovative 80s hardware that reinvented how we listened to music — the Sony Walkman. The colour scheme is a perfect match for the 1979 original, and the details down the side are simply spot-on — don’t miss the use of a silver ingot piece and grille bricks to recreate the volume slider, the offsets so the buttons stand out from the casing, and the nice deployment of the “back-to-back grille tile” technique to make those tiny square holes. I also love that silver stripe separating the blue from the grey — excellent attention to detail.
371 |
372 | ]]>
373 | https://www.brothers-brick.com/2019/02/04/block-rocking-beats-from-this-lego-walkman/feed/
374 | 0
375 | 170977https://www.brothers-brick.com/2019/02/04/block-rocking-beats-from-this-lego-walkman/
376 |
377 |
378 |
--------------------------------------------------------------------------------
/samples/stackoverflow:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Why can't `cargo build` compile structopt-derive in VS Code? - Stack Overflow
5 |
6 |
7 | most recent 30 from stackoverflow.com
8 | 2019-02-09T20:44:41Z
9 | https://stackoverflow.com/feeds/question/51744103
10 | http://www.creativecommons.org/licenses/by-sa/3.0/rdf
11 |
12 | https://stackoverflow.com/q/51744103
13 | 2
14 | Why can't `cargo build` compile structopt-derive in VS Code?
15 |
16 |
17 |
18 |
19 | Riduidel
20 | https://stackoverflow.com/users/15619
21 |
22 |
23 | 2018-08-08T10:13:06Z
24 | 2018-08-08T13:02:35Z
25 |
26 |
27 |
28 | <p>I'm trying to write a small CLI application using Rust and the excellent structopt crate.</p>
29 |
30 | <p>When I'm using Notepad++ (to write code) and Conemu (to run Cargo commands), everything works fine.</p>
31 |
32 | <p>However, when I'm using VS Code with <a href="https://github.com/rust-lang-nursery/rls-vscode" rel="nofollow noreferrer">Rust plugin</a> (or Eclipse Corrosion), <code>cargo build</code> command fails with this error</p>
33 |
34 | <pre class="lang-none prettyprint-override"><code> Compiling atty v0.2.10
35 | Compiling clap v2.31.2
36 | Compiling structopt-derive v0.2.10
37 | error: linking with `C:\Program Files (x86)\Microsoft Visual Studio\2017\BuildTools\VC\Tools\MSVC\14.14.26428\bin\HostX64\x64\link.exe` failed: exit code: 1104
38 | |
39 | = note: "C:\\Program Files (x86)\\Microsoft Visual Studio\\2017\\BuildTools\\VC\\Tools\\MSVC\\14.14.26428\\bin\\HostX64\\x64\\link.exe" "/NOLOGO" "/NXCOMPAT" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive0.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive1.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive10.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive11.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive12.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive13.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive14.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive15.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive2.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive3.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive4.rcgu.o" 
"C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive5.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive6.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive7.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive8.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.structopt_derive9.rcgu.o" "/OUT:C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.dll" "/DEF:C:\\Users\\NICOLA~1\\AppData\\Local\\Temp\\rustc.NMAPUPGalI4H\\lib.def" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.crate.metadata.rcgu.o" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.crate.allocator.rcgu.o" "/OPT:REF,NOICF" "/DEBUG" "/NATVIS:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\etc\\intrinsic.natvis" "/NATVIS:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\etc\\liballoc.natvis" "/NATVIS:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\etc\\libcore.natvis" "/LIBPATH:C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libsyn-e2bf8da738ad52ef.rlib" 
"C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libquote-90431d93ebae45fd.rlib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libproc_macro2-f91721dd8e02bb17.rlib" "C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\libunicode_xid-4611d062b1d773c0.rlib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "proc_macro-1f431d761952eacf.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "syntax-c4a428491fc49b8f.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "rustc_errors-5b01c9a7974f0222.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "syntax_pos-09170bc016e0b11a.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "rustc_data_structures-f974a5ad0e93670e.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "serialize-2eb0aeb35010f869.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "rustc_cratesio_shim-2e9a42f968785601.dll.lib" "/LIBPATH:C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib" "std-81327c94ecbc69b1.dll.lib" "C:\\Users\\nicolas-delsaux\\.rustup\\toolchains\\stable-x86_64-pc-windows-msvc\\lib\\rustlib\\x86_64-pc-windows-msvc\\lib\\libcompiler_builtins-e8d853735a158029.rlib" "opengl32.lib" "kernel32.lib" "setupapi.lib" "msimg32.lib" "credui.lib" "winspool.lib" 
"user32.lib" "gdi32.lib" "secur32.lib" "dbghelp.lib" "advapi32.lib" "advapi32.lib" "ws2_32.lib" "userenv.lib" "shell32.lib" "msvcrt.lib" "/DLL" "/IMPLIB:C:\\Users\\nicolas-delsaux\\Documents\\open-source\\rrss2imap\\target\\debug\\deps\\structopt_derive-406f571196e63046.dll.lib"
40 | = note: LINK : fatal error LNK1104: impossible d'ouvrir le fichier 'C:\Users\nicolas-delsaux\Documents\open-source\rrss2imap\target\debug\deps\structopt_derive-406f571196e63046.dll'
41 |
42 |
43 | error: aborting due to previous error
44 |
45 | error: Could not compile `structopt-derive`.
46 | warning: build failed, waiting for other jobs to finish...
47 | error: build failed
48 | </code></pre>
49 |
50 | <p>It seems like some process has locked the output file, but LockHunter (which I use to detect that kind of locks) doesn't detect any...</p>
51 |
52 | <p>What is the problem? What can I do - beside coding using Notepad++ - to be able to run cargo commands in VS Code?</p>
53 |
54 |
55 |
56 |
57 | https://stackoverflow.com/questions/51744103/-/51744721#51744721
58 | 2
59 | Answer by Riduidel for Why can't `cargo build` compile structopt-derive in VS Code?
60 |
61 | Riduidel
62 | https://stackoverflow.com/users/15619
63 |
64 |
65 | 2018-08-08T10:45:07Z
66 | 2018-08-08T10:45:07Z
67 | <p>Seems like it's a bug in RLS : <a href="https://github.com/rust-lang-nursery/rls/issues/802" rel="nofollow noreferrer">Windows: RLS keeping derive plugin DLLs opened prevents <code>cargo build</code> from working #802</a></p>
68 |
69 | <blockquote>
70 | <p>Whenever the RLS is running for a crate, I can't do cargo build for that crate. It fails with errors like this:</p>
71 |
72 | <p>[...]</p>
73 |
74 | <p>Looking in Process Explorer, RLS has the derive plugin DLLs loaded. I assume this is what's causing cargo to fail, since it can't write to those files while they're loaded. Similarly, cargo clean fails:</p>
75 |
76 | <p>[...]</p>
77 |
78 | <p>If I close VSCode (and thus RLS), building with cargo build works fine again.</p>
79 |
80 | <p>I assume this is Windows-specific due to its file exclusivity behavior. I think this started happening with a recent nightly (3/28?). I suppose this could've been caused by some change in cargo or rustc causing it to write to dlls which were already built or something.</p>
81 | </blockquote>
82 |
83 | <p>So solution should be quite simple : update RLS to its latest version and see the bug being fixed !</p>
84 |
85 | <p>And to update RLS, it's simply a matter of <code>rustup update</code></p>
86 |
87 |
--------------------------------------------------------------------------------
/src/config.rs:
--------------------------------------------------------------------------------
1 | use super::settings::*;
2 |
3 | /// This structure defines the feed-level config.
4 | /// All elements here may be configured twice : once at feed level, and once at global level.
5 | /// Obviously, all elements which are not defined at feed level use global configuration
6 | #[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
7 | pub struct Config {
8 | /// When set, contains the email address used
9 | #[serde(skip_serializing_if = "Option::is_none")]
10 | pub email: Option,
11 | /// When set, contains the folder in which entries for feed will be written
12 | #[serde(skip_serializing_if = "Option::is_none")]
13 | pub folder: Option,
14 | /// When defined, this from field will be used instead of trying to construct it from feed title
15 | #[serde(skip_serializing_if = "Option::is_none")]
16 | pub from: Option,
17 | /// When set to true, images will be inlined
18 | #[serde(
19 | skip_serializing_if = "Settings::is_false",
20 | default = "Settings::default_false"
21 | )]
22 | pub inline_image_as_data: bool,
23 | }
24 |
25 | impl Config {
26 | /// Creates a new instance with all fields set to default "falsy" values : options are set to none and booleans to false
27 | pub fn new() -> Config {
28 | Config {
29 | email: None,
30 | folder: None,
31 | inline_image_as_data: false,
32 | from: None,
33 | }
34 | }
35 |
36 | /// Creates a string view of config.
37 | /// More precisely, outputs the email address and folder in which entries are to be written
38 | /// A default config is given for options set to None.
39 | pub fn to_string(self, default: &Config) -> String {
40 | format!(
41 | "(to: {}) {}",
42 | self.email.unwrap_or_else(|| format!(
43 | "{} (default)",
44 | default.clone().email.unwrap_or_else(|| "".to_owned())
45 | )),
46 | self.folder.unwrap_or_else(|| format!(
47 | "{} (default)",
48 | default.clone().folder.unwrap_or_else(|| "".to_owned())
49 | ))
50 | )
51 | }
52 |
53 | /// Used by serde to skip serialization of default config for feeds
54 | /// This method check if config is the default one (consisting only into None options)
55 | pub fn is_none(config: &Config) -> bool {
56 | config.email.is_none()
57 | && config.folder.is_none()
58 | && config.from.is_none()
59 | && !config.inline_image_as_data
60 | }
61 |
62 | /// Clear all content from this config excepted email address
63 | pub fn clear(&mut self) {
64 | self.folder = None;
65 | }
66 |
67 | /// Get the email value for that feed, be it defined locally or from the default config
68 | pub fn get_email(&self, default: &Config) -> String {
69 | self.clone()
70 | .email
71 | .unwrap_or_else(|| default.clone().email.unwrap_or_else(|| "".to_owned()))
72 | }
73 |
74 | /// Get the folder value for that feed, be it defined locally or from the default config
75 | pub fn get_folder(&self, default: &Config) -> String {
76 | self.clone()
77 | .folder
78 | .unwrap_or_else(|| default.clone().folder.unwrap_or_else(|| "".to_owned()))
79 | }
80 |
81 | /// Compute an inline flag by resolving the two flags with this struct inline images status
82 | pub fn inline(&self, inline:bool, do_not_inline:bool)->bool {
83 | if self.inline_image_as_data {
84 | !do_not_inline
85 | } else {
86 | inline
87 | }
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/src/export.rs:
--------------------------------------------------------------------------------
1 | use std::path::PathBuf;
2 |
3 | use std::fs;
4 |
5 | use std::collections::HashMap;
6 |
7 | use super::feed::Feed;
8 | use super::store::Store;
9 |
10 | use treexml::*;
11 |
12 | pub fn export(to_file: &PathBuf, to_store: &Store) {
13 | // First group feeds per storage folder
14 | let grouped = group_feeds(to_store);
15 | // Then write this map of lists
16 | write(to_file, grouped);
17 | }
18 |
19 | fn group_feeds(to_store: &Store) -> HashMap> {
20 | to_store.feeds.iter().fold(HashMap::new(), |mut map, feed| {
21 | let feed = feed.clone();
22 | let folder = feed.config.get_folder(&to_store.settings.config);
23 | if !map.contains_key(&folder) {
24 | map.insert(folder.clone(), vec![]);
25 | }
26 | let mut updated = vec![feed];
27 | updated.append(map.get_mut(&folder).unwrap());
28 | map.insert(folder, updated);
29 | // Return value of closure (which is *not* a return statement ;-)
30 | map
31 | })
32 | }
33 |
34 | fn write(to_file: &PathBuf, to_store: HashMap>) {
35 | // warn!("exporting feeds {:?}", to_store);
36 | // Prepare the document by setting all boilerplate elements (root, head, body, ...)
37 | let mut root = Element::new("opml");
38 | root.attributes
39 | .insert("version".to_owned(), "1.0".to_owned());
40 | let mut header = Element::new("head");
41 | let mut title = Element::new("title");
42 | title.text = Some("rrss2imap OPML Export".to_owned());
43 | header.children.push(title);
44 | root.children.push(header);
45 | let mut body = Element::new("body");
46 | // Now fill body with outline elements generated from feeds
47 | for (folder, elements) in to_store {
48 | let mut folder_element = Element::new("outline");
49 | folder_element
50 | .attributes
51 | .insert("text".to_owned(), folder.clone());
52 | folder_element
53 | .attributes
54 | .insert("title".to_owned(), folder.clone());
55 | for feed in elements {
56 | let mut outline = Element::new("outline");
57 | outline
58 | .attributes
59 | .insert("type".to_owned(), "rss".to_owned());
60 | outline
61 | .attributes
62 | .insert("text".to_owned(), feed.url.clone());
63 | outline
64 | .attributes
65 | .insert("xmlUrl".to_owned(), feed.url.clone());
66 | folder_element.children.push(outline);
67 | }
68 | body.children.push(folder_element);
69 | }
70 | // Don't forget to add body after, otherwise we enter into the dangerous realm of borrowed values
71 | root.children.push(body);
72 | let mut document = Document::new();
73 | document.root = Some(root);
74 | fs::write(to_file, format!("{}", document))
75 | .unwrap_or_else(|_| panic!("Unable to write file {:?}", to_file));
76 | }
77 |
--------------------------------------------------------------------------------
/src/feed.rs:
--------------------------------------------------------------------------------
1 | use chrono::{NaiveDateTime};
2 | use tests_bin::unit_tests;
3 |
4 | use super::config::*;
5 |
6 | use super::feed_reader::*;
7 | use super::settings::*;
8 | use super::syndication;
9 | use super::message::*;
10 |
11 | #[unit_tests("feed.rs")]
12 | #[derive(Clone, Debug, Deserialize, Serialize, PartialEq)]
13 | pub struct Feed {
14 | /// Contains url of feed
15 | pub url: String,
16 | /// Contains specific configuration for field
17 | #[serde(skip_serializing_if = "Config::is_none", default = "Config::new")]
18 | pub config: Config,
19 | /// Last time the feed was read
20 | #[serde(default = "Feed::at_epoch")]
21 | pub last_updated: NaiveDateTime,
22 | /// Last message stored in IMAP, allows to correctly process feeds even when no date is provided
23 | /// which, mind you, is totally possible according to RSS specification
24 | #[serde(skip_serializing_if = "Option::is_none")]
25 | pub last_message: Option
26 | }
27 |
28 | impl Feed {
29 | /// Creates a new naivedatetime with a default value (which is, to my mind) a sensible default for computers
30 | pub fn at_epoch() -> NaiveDateTime {
31 | NaiveDateTime::from_timestamp_opt(0, 0).unwrap()
32 | }
33 |
34 | // Convert the parameters vec into a valid feed (if possible)
35 | pub fn from_vec(parameters: Vec) -> Feed {
36 | let mut consumed = parameters;
37 | let url: String = consumed
38 | .pop()
39 | .expect("You must at least define an url to add.");
40 | let mut email: Option = None;
41 | let mut folder: Option = None;
42 | // If there is a second parameter, it can be either email or folder
43 | if !consumed.is_empty() {
44 | let second = consumed.pop().unwrap();
45 | // If second parameters contains an @, I suppose it is an email address
46 | if second.contains('@') {
47 | debug!(
48 | "Second add parameter {} is considered an email address",
49 | second
50 | );
51 | email = Some(second)
52 | } else {
53 | warn!("Second add parameter {} is NOT considered an email address, but a folder. NO MORE ARGUMENTS WILL BE PROCESSED", second);
54 | folder = Some(second)
55 | }
56 | }
57 | // If there is a third parameter, it is the folder.
58 | // But if folder was already defined, there is an error !
59 | if !consumed.is_empty() && folder.is_none() {
60 | folder = Some(consumed.pop().unwrap());
61 | }
62 | Feed {
63 | url,
64 | config: Config {
65 | email,
66 | folder,
67 | from: None,
68 | inline_image_as_data: false,
69 | },
70 | last_updated: Feed::at_epoch(),
71 | last_message: None
72 | }
73 | }
74 |
75 | pub fn from_all(url:Option, email:Option, destination:Option, inline:bool) -> Feed {
76 | Feed {
77 | url: url.unwrap(),
78 | config: Config {
79 | email,
80 | folder: destination,
81 | from: None,
82 | inline_image_as_data: inline,
83 | },
84 | last_updated: Feed::at_epoch(),
85 | last_message: None
86 | }
87 | }
88 |
89 | pub fn to_string(&self, config: &Config) -> String {
90 | format!("{} {}", self.url, self.config.clone().to_string(config))
91 | }
92 |
93 | /**
94 | * Read the feed and produce the list of messages to write later
95 | */
96 | pub fn read(&self, index:usize, count:&usize) -> Vec {
97 | info!("Reading feed {}/{} from {}", index+1, count, self.url);
98 | match ureq::get(&self.url).call() {
99 | Ok(response) => match response.into_string() {
100 | Ok(text) => return self.read_response_text(text),
101 | Err(e) => error!("There is no text at {} due to error {}", &self.url, e),
102 | },
103 | Err(e) => error!("Unable to get {} due to {}.\nTODO Add better http response analysis !", &self.url, e),
104 | }
105 | vec![]
106 | }
107 |
108 | pub fn read_response_text(&self, text:String) -> Vec {
109 | match text.parse::() {
110 | Ok(parsed) => {
111 | return match parsed {
112 | syndication::Feed::Atom(atom_feed) => {
113 | AtomReader {}.read(self, &atom_feed)
114 | }
115 | syndication::Feed::RSS(rss_feed) => {
116 | RssReader {}.read(self, &rss_feed)
117 | }
118 | }
119 | }
120 | Err(e) => error!("Content ar {} is neither Atom, nor RSS {}.\nTODO check real content type to help user.", &self.url, e),
121 | }
122 | vec![]
123 | }
124 |
125 | pub fn process_message(&self, settings:&Settings, message:&Message)->Message {
126 | Message {
127 | authors: message.authors.clone(),
128 | content: Message::get_processed_content(&message.content, self, settings).unwrap(),
129 | id: message.id.clone(),
130 | last_date: message.last_date,
131 | links: message.links.clone(),
132 | title: message.title.clone(),
133 | }
134 | }
135 |
136 | /// Find in the given input feed the new messages
137 | /// A message is considered new if it has a date which is nearer than feed last processed date
138 | /// or (because RSS and Atom feeds may not have dates) if its id is not yet the id of the last
139 | /// processed feed
140 | pub fn find_new_messages(&self, sorted_messages:&[Message])->(usize, usize, bool) {
141 | let head:usize = 0;
142 | let mut tail:usize = 0;
143 | let mut found = false;
144 | // Now do the filter
145 | // This part is not so easy.
146 | // we will first iterate over the various items and for each, check that
147 | // 1 - the message id is not the last read message one
148 | // 2 - if messages have dates, the message date is more recent than the last one
149 | for (position, message) in sorted_messages.iter().enumerate() {
150 | if !found {
151 | match &self.last_message {
152 | Some(id) => if id==&message.id {
153 | tail = position;
154 | found = true;
155 | break;
156 | },
157 | None => {}
158 | };
159 | if message.last_date)->Feed {
169 | let sorted_messages = extracted;
170 | let (head, tail, found) = self.find_new_messages(sorted_messages.as_slice());
171 | let filtered_messages:&[Message] = if found {
172 | &sorted_messages[head..tail]
173 | } else {
174 | sorted_messages.as_slice()
175 | };
176 |
177 | // And write the messages into IMAP and the feed into JSON
178 | let written_messages:Vec = filtered_messages.iter()
179 | .map(|message| self.process_message(settings, message))
180 | .inspect(|e| if !settings.do_not_save { e.write_to_imap(self, settings) } )
181 | .collect();
182 | let mut last_message:Option<&Message> = written_messages.iter()
183 | // ok, there is a small problem here: if at least two elements have the same value - which is the case when feed
184 | // elements have no dates - the LAST one is used (which is **not** what we want)
185 | // see https://doc.rust-lang.org/std/iter/trait.Iterator.html#method.max_by_key
186 | .max_by_key(|e| e.last_date.timestamp());
187 | // So, to overcome last problem, if first filtered message has same date than last_message, we replace last by first
188 | // As RSS feeds are supposed to put the latest emitted message in first position
189 | match last_message {
190 | Some(last) => if filtered_messages.len()>1 && filtered_messages[0].last_date==last.last_date {
191 | last_message = Some(&filtered_messages[0]);
192 | },
193 | _ => {}
194 | }
195 |
196 | let mut returned = self.clone();
197 | if settings.do_not_save {
198 | warn!("do_not_save is set. As a consequence, feed won't be updated");
199 | } else {
200 | match last_message {
201 | Some(message) => {
202 | returned.last_updated = message.last_date;
203 | returned.last_message = Some(message.id.clone());
204 | },
205 | _ => {}
206 | }
207 | }
208 | returned
209 | }
210 | }
211 |
--------------------------------------------------------------------------------
/src/feed_errors.rs:
--------------------------------------------------------------------------------
1 | use custom_error::custom_error;
2 |
3 | custom_error!{
4 | pub UnparseableFeed
5 | DateIsNotRFC2822{value:String} = "Date {value} is not RFC-2822 compliant",
6 | DateIsNotRFC3339{value:String} = "Date {value} is not RFC-3339 compliant",
7 | DateIsNeitherRFC2822NorRFC3339{value:String} = "Date {value} is neither RFC-2822 nor RFC-3339 compliant",
8 | ChronoCantParse{source: chrono::ParseError} = "chrono can't parse date",
9 | NoDateFound = "absolutly no date field was found in feed",
10 | CantExtractImages{source: super::message::UnprocessableMessage} = "Seems like it was not possible to read message contained images"
11 | }
--------------------------------------------------------------------------------
/src/feed_reader.rs:
--------------------------------------------------------------------------------
1 | use chrono::{DateTime, Utc, FixedOffset, NaiveDateTime};
2 |
3 | use super::feed_errors::*;
4 | use super::message::*;
5 | use atom_syndication::Entry as AtomEntry;
6 | use atom_syndication::Feed as AtomFeed;
7 | use rss::Channel as RssChannel;
8 | use rss::Item as RssItem;
9 | use url::Url;
10 |
11 | use super::feed::*;
12 | use super::feed_utils::*;
13 |
14 | /// The reader trait allow reading data from a web source.
15 | /// It is supposed to be derived for Rss and Atom, but it's only a try currently ...
16 | pub trait Reader {
17 | fn extract(&self, entry:&EntryType, source:&FeedType) -> Result;
18 | fn read_feed_date(&self, source:&FeedType)->NaiveDateTime;
19 |
20 | fn extract_messages(&self, source:&FeedType)->Vec>;
21 |
22 | fn read(&self, feed:&Feed, source:&FeedType)->Vec {
23 | debug!("reading feed {}", &feed.url);
24 | let feed_date = self.read_feed_date(source);
25 | info!(
26 | "Feed date is {} while previous read date is {}",
27 | feed_date, feed.last_updated
28 | );
29 | let extracted:Vec> = self.extract_messages(source);
30 |
31 | let messages:Result, UnparseableFeed> = extracted.into_iter().collect();
32 | messages.unwrap_or(vec![])
33 | }
34 | }
35 |
36 | pub struct AtomReader {}
37 |
38 | impl AtomReader {
39 | fn extract_authors_from_atom(entry: &AtomEntry, feed: &AtomFeed) -> Vec<(String, String)> {
40 | let domain = AtomReader::find_atom_domain(feed);
41 | // This is where we also transform author names into urls in order
42 | // to have valid email addresses everywhere
43 | let mut message_authors: Vec = entry
44 | .authors()
45 | .iter()
46 | .map(|a| a.name().to_owned())
47 | .collect();
48 | if message_authors.is_empty() {
49 | message_authors = vec![feed.title().to_owned().to_string()]
50 | }
51 | sanitize_message_authors(message_authors, domain)
52 | }
53 |
54 | fn find_atom_domain(feed: &AtomFeed) -> String {
55 | return feed
56 | .links()
57 | .iter()
58 | .filter(|link| link.rel() == "self" || link.rel() == "alternate").find(|link| !link.href().is_empty())
59 | // Get the link
60 | .map(|link| link.href())
61 | // Transform it into an url
62 | .map(|href| Url::parse(href).unwrap())
63 | // then get host
64 | .map(|url| url.host_str().unwrap().to_string())
65 | // and return value
66 | .unwrap_or("todo.find.domain.rss".to_string());
67 | }
68 | }
69 |
70 | impl Reader for AtomReader {
71 | fn extract(&self, entry: &AtomEntry, source: &AtomFeed) -> Result {
72 | info!("Reading atom entry {} from {:?}", entry.id(), entry.links());
73 | let authors = AtomReader::extract_authors_from_atom(entry, source);
74 | let last_date = entry
75 | .updated()
76 | .naive_utc();
77 | let content = match entry.content() {
78 | Some(content) => content.value().unwrap(),
79 | None => match entry.summary() {
80 | Some(text)=> text.as_str(),
81 | None=>""
82 | }
83 | }
84 | .to_owned();
85 | let message = Message {
86 | authors,
87 | content,
88 | id: entry.id().to_owned(),
89 | last_date,
90 | links: entry.links().iter().map(|l| l.href().to_owned()).collect(),
91 | title: entry.title().as_str().to_string()
92 | };
93 | Ok(message)
94 | }
95 |
96 | fn read_feed_date(&self, source:&AtomFeed)->NaiveDateTime {
97 | source.updated().naive_utc()
98 | }
99 |
100 | fn extract_messages(&self, source:&AtomFeed)->Vec> {
101 | source.entries()
102 | .iter()
103 | .map(|e| self.extract(e, source))
104 | .collect()
105 | }
106 | }
107 |
108 | pub struct RssReader {}
109 |
110 | impl RssReader {
111 | fn extract_authors_from_rss(entry: &RssItem, feed: &RssChannel) -> Vec<(String, String)> {
112 | let domain = RssReader::find_rss_domain(feed);
113 | // This is where we also transform author names into urls in order
114 | // to have valid email addresses everywhere
115 | let message_authors: Vec;
116 | match entry.author() {
117 | Some(l) => message_authors = vec![l.to_owned()],
118 | _ => message_authors = vec![feed.title().to_owned()],
119 | }
120 | sanitize_message_authors(message_authors, domain)
121 | }
122 | fn find_rss_domain(feed: &RssChannel) -> String {
123 | return Some(feed.link())
124 | .map(|href| Url::parse(href).unwrap())
125 | // then get host
126 | .map(|url| url.host_str().unwrap().to_string())
127 | // and return value
128 | .unwrap_or("todo.find.domain.atom".to_string());
129 | }
130 |
131 | fn try_hard_to_parse(date:String) -> Result, UnparseableFeed> {
132 | let parsed = rfc822_sanitizer::parse_from_rfc2822_with_fallback(&date);
133 | if parsed.is_ok() {
134 | Ok(parsed?)
135 | } else {
136 | let retry = DateTime::parse_from_rfc3339(&date);
137 | if retry.is_ok() {
138 | Ok(retry?)
139 | } else {
140 | Err(UnparseableFeed::DateIsNeitherRFC2822NorRFC3339 {value:date})
141 | }
142 | }
143 | }
144 |
145 | fn extract_date_from_rss(entry: &RssItem, feed: &RssChannel) -> Result, UnparseableFeed> {
146 | if entry.pub_date().is_some() {
147 | let mut pub_date = entry.pub_date().unwrap().to_owned();
148 | pub_date = pub_date.replace("UTC", "UT");
149 | RssReader::try_hard_to_parse(pub_date)
150 | } else if entry.dublin_core_ext().is_some()
151 | && !entry.dublin_core_ext().unwrap().dates().is_empty()
152 | {
153 | let pub_date = &entry.dublin_core_ext().unwrap().dates()[0];
154 | Ok(DateTime::parse_from_rfc3339(pub_date)?)
155 | } else {
156 | debug!("feed item {:?} date can't be parsed, as it doesn't have neither pub_date nor dc:pub_date. We will replace it with feed date if possible",
157 | &entry.link()
158 | );
159 | if feed.pub_date().is_some() {
160 | let pub_date = feed.pub_date().unwrap().to_owned();
161 | RssReader::try_hard_to_parse(pub_date)
162 | } else if feed.last_build_date().is_some() {
163 | let last_pub_date = feed.last_build_date().unwrap().to_owned();
164 | RssReader::try_hard_to_parse(last_pub_date)
165 | } else {
166 | Ok(DateTime::::from_utc(
167 | Feed::at_epoch(),
168 | FixedOffset::east_opt(0).unwrap()))
169 | }
170 | }
171 | }
172 | }
173 |
174 | impl Reader for RssReader {
175 | fn extract(&self, entry: &RssItem, source: &RssChannel) -> Result {
176 | info!("Reading RSS entry {:?} from {:?}", entry.guid(), entry.link());
177 | let authors = RssReader::extract_authors_from_rss(entry, source);
178 | let content = entry
179 | .content()
180 | .unwrap_or_else(|| entry.description().unwrap_or(""))
181 | // First step is to fix HTML, so load it using html5ever
182 | // (because there is no better html parser than a real browser one)
183 | // TODO implement image inlining
184 | .to_owned();
185 | let links = match entry.link() {
186 | Some(l) => vec![l.to_owned()],
187 | _ => vec![],
188 | };
189 | let id = if links.is_empty() {
190 | match entry.guid() {
191 | Some(g) => g.value().to_owned(),
192 | _ => "no id".to_owned(),
193 | }
194 | } else {
195 | links[0].clone()
196 | };
197 | let last_date = RssReader::extract_date_from_rss(entry, source);
198 | let message = Message {
199 | authors,
200 | content,
201 | id,
202 | last_date: last_date?.naive_utc(),
203 | links,
204 | title: entry.title().unwrap_or("").to_owned(),
205 | };
206 | Ok(message)
207 | }
208 |
209 | fn extract_messages(&self, source:&RssChannel)->Vec> {
210 | source.items()
211 | .iter()
212 | .map(|e| self.extract(e, source))
213 | .collect()
214 | }
215 |
216 | fn read_feed_date(&self, source:&RssChannel)->NaiveDateTime {
217 | let n = Utc::now();
218 | let feed_date_text = match source.pub_date() {
219 | Some(p) => p.to_owned(),
220 | None => match source.last_build_date() {
221 | Some(l) => l.to_owned(),
222 | None => n.to_rfc2822(),
223 | },
224 | };
225 | DateTime::parse_from_rfc2822(&feed_date_text)
226 | .unwrap()
227 | .naive_utc()
228 |
229 | }
230 | }
231 |
--------------------------------------------------------------------------------
/src/feed_utils.rs:
--------------------------------------------------------------------------------
1 | use regex::Regex;
2 | use tests_bin::unit_tests;
3 |
4 | ///
5 | /// Sanitize a list of message authors
6 | ///
7 | /// # Arguments
8 | ///
9 | /// * `message_authors` a list of message autros to sanitize
10 | /// * `domain` a default domain string, used when domain is given
11 | #[unit_tests("feed_utils/can_sanitize_message_authors.rs")]
12 | pub fn sanitize_message_authors(message_authors:Vec, domain:String)->Vec<(String, String)> {
13 | let fixed = message_authors
14 | .iter()
15 | .map(|author| {
16 | sanitize_email(author, &domain)
17 | })
18 | .collect();
19 | fixed
20 | }
21 |
22 | ///
23 | /// Trim the input string using the given set of characters as potential separators
24 | ///
25 | /// # Arguments
26 | ///
27 | /// * `text` text to trim
28 | /// * `characters` characters to use as separator
29 | ///
30 | /// # Return
31 | ///
32 | /// The trimmed text
33 | ///
34 | #[unit_tests("feed_utils/can_trim_to_chars.rs")]
35 | fn trim_to_chars(text:&str, characters:Vec<&str>)->String {
36 | let mut remaining = text;
37 | for cutter in characters {
38 | let elements:Vec<&str> = remaining.split(cutter).collect();
39 | remaining = elements[0].trim();
40 | }
41 | remaining.to_string()
42 | }
43 |
44 | ///
45 | /// Sanitizes email using "good" regular expression
46 | /// (which I obviously don't understand anymore) able to remove unwanted characters in email address
47 | #[unit_tests("feed_utils/can_sanitize_email.rs")]
48 | pub fn sanitize_email(email:&String, domain:&String)->(String, String) {
49 | lazy_static! {
50 | static ref EMAIL_AND_NAME_DETECTOR:Regex =
51 | Regex::new("([[:alpha:]_%\\+\\-\\.]+@[[:alpha:]_%\\+\\-]+\\.[[:alpha:]_%\\+\\-]+{1,}) \\(([^\\)]*)\\)").unwrap();
52 | }
53 | lazy_static! {
54 | static ref BAD_CHARACTER_REMOVER:Regex =
55 | Regex::new("[^[:alnum:].]").unwrap();
56 | }
57 | if EMAIL_AND_NAME_DETECTOR.is_match(email) {
58 | let captures = EMAIL_AND_NAME_DETECTOR.captures(email).unwrap();
59 | // Maybe we could rewrite it in a better way
60 | let name:String = captures.get(2).unwrap().as_str().to_string();
61 | let email:String = captures.get(1).unwrap().as_str().to_string();
62 | (name, email)
63 | } else {
64 | // When no email is provided, use domain name
65 | let email = if email.is_empty() {
66 | domain
67 | } else {
68 | email
69 | };
70 | // Remove bad characters
71 | let trimmed:String = trim_to_chars(email, vec!["|", ":", "-", "<", ">"]);
72 | let lowercased = trimmed.to_lowercase();
73 | let tuple = (trimmed,
74 | BAD_CHARACTER_REMOVER.replace_all(&lowercased, "_")
75 | );
76 | (tuple.0, format!("{}@{}", tuple.1, domain))
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/src/image_to_data.rs:
--------------------------------------------------------------------------------
1 | use base64::engine::*;
2 |
3 | use lol_html::{rewrite_str, element, RewriteStrSettings};
4 | use lol_html::errors::*;
5 | use tests_bin::unit_tests;
6 |
7 | #[unit_tests("image_to_data.rs")]
8 | pub fn transform(document: &String) -> Result {
9 |
10 | rewrite_str(document,
11 | RewriteStrSettings {
12 | element_content_handlers: vec![
13 | // Rewrite images having src where src doesn't start with data
14 | element!("img[src]", |el| {
15 | let src:String = el
16 | .get_attribute("src")
17 | .unwrap();
18 | debug!("processing image at url {}", &src);
19 |
20 | if !src.starts_with("data") {
21 | // Now it's time to rewrite!
22 | // Now download image source and base64 encode it !
23 | debug!("reading image from {}", &src);
24 | if let Ok(response) = ureq::get(&src).call() {
25 | let mut image: Vec = vec![];
26 | if let Ok(_value) = response.into_reader().read_to_end(&mut image) {
27 | let image_bytes = image.as_slice();
28 | let encoded = general_purpose::STANDARD_NO_PAD.encode(image_bytes);
29 | let image_mime_type = tree_magic_mini::from_u8(image_bytes);
30 | let encoded_image = format!("data:{};base64,{}", image_mime_type, encoded);
31 | el.set_attribute("src", &encoded_image).unwrap();
32 | }
33 | }
34 | }
35 |
36 | Ok(())
37 | })
38 | ],
39 | ..RewriteStrSettings::default()
40 | })
41 | }
42 |
--------------------------------------------------------------------------------
/src/import.rs:
--------------------------------------------------------------------------------
1 | use std::path::PathBuf;
2 |
3 | use std::fs::File;
4 | use std::io::Read;
5 |
6 | use super::config::Config;
7 | use super::feed::Feed;
8 | use super::store::Store;
9 |
10 | use treexml::*;
11 |
12 | pub fn import(from_file: &PathBuf, to_store: &mut Store) {
13 | let mut file =
14 | File::open(from_file).unwrap_or_else(|_| panic!("Unable to open file {:?}", from_file));
15 | let mut contents = String::new();
16 | file.read_to_string(&mut contents)
17 | .unwrap_or_else(|_| panic!("Unable to read file {:?}", from_file));
18 |
19 | let doc = Document::parse(contents.as_bytes()).unwrap();
20 | let root = doc.root.unwrap();
21 |
22 | // old style parsing is good, because it is old :-)
23 | for element in root.children {
24 | match element.name.as_ref() {
25 | "head" => debug!("Reading {}", element),
26 | "body" => import_body(element, to_store, ""),
27 | _ => error!("element {:?} was unexpected, please fill a bug !", element),
28 | }
29 | }
30 | }
31 |
32 | fn import_body(body: Element, to_store: &mut Store, folder: &str) {
33 | for element in body.children {
34 | match element.name.as_ref() {
35 | "outline" => import_outline(element, to_store, folder),
36 | _ => error!("element {:?} was unexpected, please fill a bug!", element),
37 | }
38 | }
39 | }
40 |
41 | fn import_outline(outline: Element, to_store: &mut Store, folder: &str) {
42 | if outline.children.is_empty() {
43 | // An outline without children is considered an OPML entry. Does it have the right set of attributes ?
44 | if outline.attributes.contains_key("type")
45 | && outline.attributes.contains_key("text")
46 | && outline.attributes.contains_key("xmlUrl")
47 | {
48 | let url = outline.attributes.get("xmlUrl");
49 | let feed = Feed {
50 | url: url.unwrap().to_string(),
51 | config: Config {
52 | email: None,
53 | folder: Some(folder.to_string()),
54 | from: None,
55 | inline_image_as_data: false,
56 | },
57 | last_updated: Feed::at_epoch(),
58 | last_message: None,
59 | };
60 | to_store.add_feed(feed);
61 | } else {
62 | error!("outline {:?} has no children, but doesn't has the right set of attributes. Please fill a bug!", outline.attributes);
63 | }
64 | } else {
65 | // An outline with children is considered an OPML folder. Does it have the right set of attributes ?
66 | if outline.attributes.contains_key("text") && outline.attributes.contains_key("title") {
67 | let folder = &outline.attributes["text"];
68 | import_body(outline.clone(), to_store, &folder.to_string());
69 | } else {
70 | error!("outline {:?} has children, but doesn't has the right set of attributes. Please fill a bug!", outline.attributes);
71 | }
72 | }
73 | }
74 |
--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
1 | //! Application transforming rss feeds into email messages by directly pushing the entries into IMAP folders.
2 | //! This application is an adaptation of the rss2imap Python script to Rust.
3 | //!
4 | //! #### How to use ?
5 | //!
6 | //! The simplest way to understand what to do is just to run `rrss2imap --help`
7 | //!
8 | //! It should output something like
9 | //!
10 | //! FLAGS:
11 | //! -h, --help Prints help information
12 | //! -V, --version Prints version information
13 | //!
14 | //! SUBCOMMANDS:
15 | //! add Adds a new feed given its url
16 | //! delete Delete the given feed
17 | //! email Changes email address used in feed file to be the given one
18 | //! export Export subscriptions as opml file
19 | //! help Prints this message or the help of the given subcommand(s)
20 | //! import import the given opml file into subscriptions
21 | //! list List all feeds configured
22 | //! new Creates a new feedfile with the given email address
23 | //! reset Reset feedfile (in other words, remove everything)
24 | //! run Run feed parsing and transformation
25 | //!
26 | //! Which give you a glimpse of what will happen
27 | //!
28 | //! Each of these commands also provide some help, when run with the same `--help` flag.
29 | //!
30 | //! The important operations to memorize are obviously
31 | //!
32 | //! #### `rrss2imap new`
33 | //!
34 | //! Creates a new `config.json` file in the configuration directory
35 | //! (`~/.config/rrss2imap/` on linux,
36 | //! `~/Library/Preferences/org.Rrss2imap.rrss2imap` on macOS,
37 | //! `AppData\Roaming\Rrss2imap\rrss2imap\` on Windows). At init time, the
38 | //! config file will only contains `settings` element with the email address
39 | //! set. You **have** to edit this file and set
40 | //!
41 | //! * the used imap server
42 | //! ** with user login and password
43 | //! ** and security settings (secure should contain `{"Yes": secure port}` for
44 | //! imap/s or `{"No": unsecure port}` for simple imap)
45 | //! * the default config
46 | //! ** folder will be the *full path to an imap folder* where entries will
47 | //! fall in (e.g., `INBOX.News`). The exact syntax depends on your email provider.
48 | //! ** email will be the recipient email address (which may not be yours for easier filtering)
49 | //! ** Base64 image inlining
50 | //!
51 | //! `feeds` is the list of all rss feeds; use `rrss2imap add` to add a new feed.
52 | //!
53 | //! #### `rrss2imap add`
54 | //!
55 | //! This command will add a new feed to your config. You can directly set here the email recipient as well as the folder
56 | //! (but not the base64 image inlining parameter)
57 | //!
58 | //! #### `rrss2imap run`
59 | //!
60 | //! This is the main command. It will
61 | //!
62 | //! 1. get all rss/atom feed contents
63 | //! 2. List all new entries in these feeds
64 | //! 3. Transform these entries into valid email messages
65 | //! 4. Push these mail messages directly on IMAP server
66 | //!
67 | //! #### `rrss2imap list`
68 | //!
69 | //! Displays a list of the rss feeds. Here is an example
70 | //!
71 | //! ```
72 | //! 0 : http://tontof.net/?rss (to: Nicolas Delsaux (default)) RSS/rrss2imap (default)
73 | //! 1 : https://www.brothers-brick.com/feed/ (to: Nicolas Delsaux (default)) RSS/rrss2imap (default)
74 | //! 2 : https://nicolas-delsaux.hd.free.fr/rss-bridge/?action=display&bridge=LesJoiesDuCode&format=AtomFormat (to: Nicolas Delsaux (default)) RSS/rrss2imap (default)
75 | //! ```
76 | //!
77 | //! Please notice that each entry has an associated number, which is the one to enter when running `rrss2imap delete `
78 | //!
79 | //! #### `config.json` format
80 | //!
81 | //! A typical feedfile will look like this
82 | //!
83 | //! ```json
84 | //! {
85 | //! "settings": {
86 | //! "email": {
87 | //! "server": "the imap server of your mail provider",
88 | //! "user": "your imap user name",
89 | //! "password": "your imap user password",
90 | //! "secure": {
91 | //! "Yes": 993 // Set to "Yes": port for imaps or "No": port for unsecure imap
92 | //! }
93 | //! },
94 | //! // This config is to be used for all feeds
95 | //! "config": {
96 | //! // This is the email address written in each mail sent. It can be different from the email user
97 | //! "email": "Nicolas Delsaux ",
98 | //! // This is the imap folder in which mails will be written
99 | //! "folder": "RSS/rrss2imap"
100 | //! // Setting this to true will force rrss2imap to transform all images into
101 | //! // base64. This prevents images from being downloaded (and is really cool when reading feeds from a smartphone)
102 | //! // But largely increases each mail size (which can be quite bothering)
103 | //! "inline_image_as_data": true
104 | //! }
105 | //! },
106 | //! "feeds": [
107 | //! {
108 | //! "url": "http://tontof.net/?rss",
109 | //! // This last updated is updated for each entry and should be enough to have rss items correctly read
110 | //! "last_updated": "2019-05-04T16:53:15",
111 | //! "config": {
112 | //! // each config element can be overwritten at the feed level
113 | //! }
114 | //! },
115 | //! ```
116 | //!
117 |
118 | extern crate structopt;
119 | #[macro_use]
120 | extern crate log;
121 | extern crate serde;
122 | #[macro_use]
123 | extern crate serde_derive;
124 | extern crate serde_json;
125 | extern crate flexi_logger;
126 | extern crate treexml;
127 | extern crate chrono;
128 | extern crate rfc822_sanitizer;
129 | extern crate unidecode;
130 | #[macro_use]
131 | extern crate lazy_static;
132 | #[macro_use]
133 | extern crate human_panic;
134 | extern crate lol_html;
135 | extern crate imap;
136 | extern crate base64;
137 | extern crate atom_syndication;
138 | extern crate rss;
139 | extern crate xhtmlchardet;
140 | extern crate url;
141 | extern crate regex;
142 | extern crate custom_error;
143 | use flexi_logger::Logger;
144 | use std::path::PathBuf;
145 | use structopt::StructOpt;
146 | use std::error::Error;
147 |
148 | mod config;
149 | mod export;
150 | mod feed_errors;
151 | mod feed_reader;
152 | mod feed_utils;
153 | mod feed;
154 | mod image_to_data;
155 | mod import;
156 | mod message;
157 | mod settings;
158 | mod store;
159 | mod syndication;
160 |
161 | ///
162 | /// rrss2imap is a script used to transform rss feed entries into mail messages that are directly dropped
163 | /// into your mailbox by the grace of imap protocol
164 | ///
165 | #[derive(Debug, StructOpt)]
166 | #[structopt(author=env!("CARGO_PKG_AUTHORS"))]
167 | struct RRSS2IMAP {
168 | /// Verbose mode (-v, -vv, -vvv)
169 | #[structopt(short, long, parse(from_occurrences))]
170 | verbose: u8,
171 | #[structopt(subcommand)]
172 | cmd: Command
173 | }
174 |
175 | #[derive(Debug, StructOpt)]
176 | enum Command {
177 | /// Creates a new feedfile with the given email address
178 | #[structopt(name = "new")]
179 | New {
180 | /// email the notifications will be sent to
181 | email: String,
182 | },
183 | /// Changes email address used in feed file to be the given one
184 | #[structopt(name = "email")]
185 | Email { email: String },
186 | /// Run feed parsing and transformation
187 | #[structopt(name = "run")]
188 | Run,
189 | /// Adds a new feed given its url.
190 | /// This option can use either named parameters or positional parameters.
191 | /// Although positional parameters may seems simpler to use, they're of a more weird usage
192 | /// (and may be sometimes buggy)
193 | #[structopt(name = "add")]
194 | Add {
195 | /// url of the feed
196 | #[structopt(short = "u", long = "url")]
197 | url:Option,
198 | /// email address to use to forward feed content
199 | #[structopt(short = "e", long = "email")]
200 | email:Option,
201 | /// destination folder of the email
202 | #[structopt(short = "d", long = "destination")]
203 | destination:Option,
204 | /// inline image in this feed (useful only when default is to not include images)
205 | #[structopt(short = "i", long = "inline-mages")]
206 | inline_images:bool,
207 | /// Don't inline image in this feed (useful only when default is to include images)
208 | #[structopt(short = "x", long = "do-not-inline-mages")]
209 | do_not_inline_images:bool,
210 | /// Parameters used to add the feed. Expected parameters are
211 | ///
212 | /// - url of the feed. web page urls are not yet supported. Given as first parameters, **mandatory**
213 | ///
214 | /// - email address to use to forward feed content, **optional**
215 | ///
216 | /// - destination folder of feed content, **optional**
217 | ///
218 | /// Notice parameters have to be given in THIS order.
219 | parameters: Vec,
220 | },
221 | /// List all feeds configured
222 | #[structopt(name = "list")]
223 | List,
224 | /// Reset feedfile (in other words, remove everything)
225 | #[structopt(name = "reset")]
226 | Reset,
227 | /// Delete the given feed
228 | #[structopt(name = "delete")]
229 | Delete {
230 | // index of the feed to delete
231 | feed: u32,
232 | },
233 | /// Export subscriptions as opml file
234 | #[structopt(name = "export")]
235 | Export {
236 | /// Output file, stdout if not present
237 | #[structopt(parse(from_os_str))]
238 | output: Option,
239 | },
240 | /// import the given opml file into subscriptions
241 | #[structopt(name = "import")]
242 | Import {
243 | /// Output file, stdout if not present
244 | #[structopt(parse(from_os_str))]
245 | input: Option,
246 | },
247 | }
248 |
249 | /// Main function simply load the RRSS2IMAP struct from the command-line arguments
250 | pub fn main() -> Result<(), Box> {
251 | if !cfg!(debug_assertions) {
252 | setup_panic!();
253 | }
254 | let opt = RRSS2IMAP::from_args();
255 |
256 | // Configure logger
257 | Logger::try_with_env_or_str(
258 | match opt.verbose {
259 | 0 => "warn, rrss2imap = info",
260 | 1 => "warn, rrss2imap = debug",
261 | 2 => "warn, rrss2imap = trace",
262 | _ => "trace", })
263 | .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e))
264 | .format(match opt.verbose {
265 | 0 => flexi_logger::colored_default_format,
266 | 1 => flexi_logger::colored_default_format,
267 | 2 => flexi_logger::colored_detailed_format,
268 | _ => flexi_logger::colored_with_thread, })
269 | .start()
270 | .unwrap_or_else(|e| panic!("Logger initialization failed with {}", e));
271 |
272 | let store_path = store::find_store();
273 | let store_result = store::Store::load(&store_path);
274 | match store_result {
275 | Ok(mut store) => {
276 | match opt.cmd {
277 | Command::New { email } => store.init_config(email),
278 | Command::Email { email } => store.set_email(email),
279 |
280 | Command::List => store.list(),
281 |
282 | Command::Add { url, email, destination, inline_images, do_not_inline_images, parameters } =>
283 | store.add(url, email, destination, store.settings.config.inline(inline_images, do_not_inline_images), parameters),
284 | Command::Delete { feed } => store.delete(feed),
285 |
286 | Command::Reset => store.reset(),
287 |
288 | Command::Run => store.run(),
289 |
290 | Command::Export { output } => store.export(output),
291 | Command::Import { input } => store.import(input),
292 | }
293 | },
294 | Err(e) => {
295 | error!("Impossible to open store {}\n{}", store_path.to_string_lossy(), e);
296 | }
297 | }
298 | Ok(())
299 | }
300 |
--------------------------------------------------------------------------------
/src/message.css:
--------------------------------------------------------------------------------
/* Stylesheet inlined into every HTML mail body generated by rrss2imap. */

/* Never let an image overflow the mail client's viewport. */
img {
    max-width: 100% !important;
    height: auto;
}

/* Base typography for the message body; the -webkit- properties keep
   long unbroken words wrapping correctly in WebKit-based mail clients. */
body,
#body {
    font-size: 12pt;
    word-wrap: break-word;
    -webkit-nbsp-mode: space;
    -webkit-line-break: after-white-space;
    font-family: Georgia, Times New Roman, Times, serif;
}

/* Unvisited links in the classic blue. */
a:link {
    color: #0000cc
}

/* The article title links back to the source page without looking like a link. */
h1.header a {
    font-weight: normal;
    text-decoration: none;
    color: black;
}

/* De-emphasize the feed summary line. */
.summary {
    font-size: 80%;
    font-style: italic;
}
29 |
--------------------------------------------------------------------------------
/src/message.rs:
--------------------------------------------------------------------------------
1 | use chrono::NaiveDateTime;
2 |
3 | use super::feed::Feed;
4 | use super::image_to_data;
5 | use super::settings::*;
6 | use mail_builder::MessageBuilder;
7 | use custom_error::custom_error;
8 |
// All the reasons a feed entry may fail to be turned into / written as a mail.
// NOTE: the second message spans two source lines — the line break is part of
// the literal error text.
custom_error!{pub UnprocessableMessage
    CantPutDateInMessage{ value:String } = "EmailMessage can't parse date from {value}",
    CantPutFirstAuthorInMessage { value:String } = "Unable to parse first author {value}.
Please consider adding in feed config the \"from\": ... field",
    CantWriteTransformedMessage = "Can't re-write transformed message after image Base64'ing"
}
15 |
16 | ///
17 | /// Structure for storing message data prior to having these messages written to IMAP.
18 | /// This structure serves as a common interface for Item/Entry
19 | #[derive(Clone)]
20 | pub struct Message {
21 | /// List of message authors
22 | pub authors: Vec<(String, String)>,
23 | /// Message content. Image extraction should happen BEFORE that storage.
24 | pub content: String,
25 | /// Message id
26 | pub id: String,
27 | pub last_date: NaiveDateTime,
28 | pub links: Vec,
29 | pub title: String,
30 | }
31 |
32 | impl Message {
33 | pub fn write_to_imap(&self, feed: &Feed, settings: &Settings) {
34 | let folder = feed.config.get_folder(&settings.config);
35 | let content = self.build_message(feed, settings);
36 | match content {
37 | Ok(text) => {
38 | debug!("===========================\nWriting message content to IMAP\n{}\n===========================",
39 | text);
40 | match settings.email.append(&folder, &text) {
41 | Ok(_) => debug!("Successfully written {}", self.title),
42 | Err(e) => error!(
43 | "{}\nUnable to select mailbox {}. Item titled {} won't be written",
44 | e, &folder, self.title
45 | ),
46 | }
47 | },
48 | Err(error) => {
49 | warn!("Couldn(t write message {:?} from feed {} due to {}", self.links, feed.url, error);
50 | }
51 | }
52 | }
53 |
54 | fn build_from(&self, feed:&Feed, _settings:&Settings)->(String, String) {
55 | match &feed.config.from {
56 | Some(from) =>(from.to_owned(), from.to_owned()),
57 | None => {
58 | if self.authors.is_empty() {
59 | ("Unkown author".to_owned(), "what@what.com".to_owned())
60 | } else {
61 | self.authors[0].to_owned()
62 | }
63 | }
64 | }
65 | }
66 |
67 | fn build_message(&self, feed: &Feed, settings: &Settings) -> Result {
68 | let content = self.extract_content(feed, settings);
69 | debug!("===========================\nCreating message content\n{}\n===========================", content);
70 | let from = self.build_from(feed, settings);
71 | let _date = self.date_text();
72 | let to_addr = settings.config.email.as_ref().unwrap_or(&settings.email.user);
73 | let email = MessageBuilder::new()
74 | .from(from)
75 | .to(to_addr.as_str())
76 | .subject(str::replace(self.title.as_str(), "\n", ""))
77 | .html_body(content.as_str())
78 | .date(self.last_date.timestamp())
79 | .write_to_string()
80 | .unwrap();
81 | Ok(email)
82 | }
83 |
84 | /// Makes a valid HTML file out of the given Item.
85 | /// This method provides all the transformation that should happen
86 | fn extract_content(&self, _feed: &Feed, _settings: &Settings) -> String {
87 | let style = include_str!("message.css");
88 | let title = format!("