├── VERSION
├── .envrc
├── frontend
├── src
│ ├── assets
│ │ ├── .gitkeep
│ │ ├── images
│ │ │ ├── .gitkeep
│ │ │ └── logo.png
│ │ ├── desc-search-options.xml
│ │ └── desc-search-packages.xml
│ ├── index.js
│ ├── Utils.elm
│ ├── Page
│ │ ├── Home.elm
│ │ ├── Flakes.elm
│ │ └── Options.elm
│ ├── index.html
│ ├── Route
│ │ └── SearchQuery.elm
│ ├── Route.elm
│ ├── index.scss
│ └── Main.elm
├── registry.dat
├── versions.dat
├── netlify.toml
├── elm.json
├── config
│ ├── webpack.dev.js
│ ├── webpack.prod.js
│ └── webpack.common.js
├── default.nix
├── tests
│ └── Example.elm
├── package.json
└── elm-srcs.nix
├── flake-info
├── .cargo
│ └── config.toml
├── examples
│ ├── examples.txt
│ ├── examples.in.json
│ ├── pull.sh
│ ├── github:W95Psp-LiterateFStar.json
│ ├── github:serokell-deploy-rs.json
│ ├── .-..json
│ ├── github:ngi-nix-openpgp-ca.json
│ └── adaspark-offen.json
├── src
│ ├── data
│ │ ├── mod.rs
│ │ ├── system.rs
│ │ ├── flake.rs
│ │ ├── pandoc.rs
│ │ ├── source.rs
│ │ ├── prettyprint.rs
│ │ ├── utility.rs
│ │ ├── import.rs
│ │ └── export.rs
│ ├── commands
│ │ ├── mod.rs
│ │ ├── nix_check_version.rs
│ │ ├── nix_flake_info.rs
│ │ ├── nix_flake_attrs.rs
│ │ └── nixpkgs_info.rs
│ ├── lib.rs
│ └── bin
│ │ └── flake-info.rs
├── assets
│ ├── data
│ │ └── fix-xrefs.lua
│ └── commands
│ │ └── flake_info.nix
├── Cargo.toml
├── default.nix
└── README.md
├── overlay.nix
├── .gitignore
├── .github
├── dependabot.yml
├── workflows
│ ├── update-flake-lock.yml
│ ├── build-flake-info.yml
│ ├── frontend.yml
│ ├── check-flake-files.yml
│ └── import-to-elasticsearch.yml
└── actions
│ └── common-setup
│ └── action.yml
├── flake.lock
├── flakes
└── manual.toml
├── README.md
└── flake.nix
/VERSION:
--------------------------------------------------------------------------------
1 | 32
2 |
--------------------------------------------------------------------------------
/.envrc:
--------------------------------------------------------------------------------
1 | use flake
2 |
--------------------------------------------------------------------------------
/frontend/src/assets/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/frontend/src/assets/images/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/flake-info/.cargo/config.toml:
--------------------------------------------------------------------------------
1 | [env]
2 | MIN_NIX_VERSION = "2.4.0" # flakes
3 |
--------------------------------------------------------------------------------
/frontend/registry.dat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CoR/nixos-search/main/frontend/registry.dat
--------------------------------------------------------------------------------
/frontend/versions.dat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CoR/nixos-search/main/frontend/versions.dat
--------------------------------------------------------------------------------
/frontend/netlify.toml:
--------------------------------------------------------------------------------
1 | [[redirects]]
2 | from = "/*"
3 | to = "/index.html"
4 | status = 200
5 |
--------------------------------------------------------------------------------
/frontend/src/assets/images/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/CoR/nixos-search/main/frontend/src/assets/images/logo.png
--------------------------------------------------------------------------------
/flake-info/examples/examples.txt:
--------------------------------------------------------------------------------
1 | github:serokell/deploy-rs
2 | github:W95Psp/LiterateFStar
3 | github:ngi-nix/openpgp-ca
4 | ./.
5 |
--------------------------------------------------------------------------------
/overlay.nix:
--------------------------------------------------------------------------------
1 | final: prev:
2 | {
3 | nixos-search = {
4 | frontend = import ./frontend { pkgs = prev; };
5 | flake-info = import ./flake-info { pkgs = prev; };
6 | };
7 | }
8 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.log
2 | .DS_Store
3 | .cache
4 | .idea
5 | .vscode
6 | .node_repl_history
7 | .npm
8 | target/
9 | build/Release
10 | dist
11 | elm-stuff/
12 | eval-*
13 | ignore
14 | logs
15 | node_modules
16 | npm-debug.log*
17 | repl-temp-*
18 | result
19 | src-url
20 | .direnv/
21 |
--------------------------------------------------------------------------------
/flake-info/src/data/mod.rs:
--------------------------------------------------------------------------------
1 | mod export;
2 | mod flake;
3 | pub mod import;
4 | mod pandoc;
5 | mod prettyprint;
6 | mod source;
7 | mod system;
8 | mod utility;
9 |
10 | pub use export::Export;
11 | pub use flake::{Flake, Repo};
12 | pub use source::{FlakeRef, Hash, Nixpkgs, Source};
13 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 |
4 | - package-ecosystem: github-actions
5 | directory: "/"
6 | schedule:
7 | interval: daily
8 | time: '00:00'
9 | timezone: UTC
10 | open-pull-requests-limit: 10
11 | commit-message:
12 | prefix: "Dependabot"
13 | include: "scope"
14 |
--------------------------------------------------------------------------------
/flake-info/examples/examples.in.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "type": "git",
4 | "url": "github:fluffynukeit/adaspark"
5 | },
6 | {
7 | "type": "github",
8 | "owner": "ngi-nix",
9 | "repo": "offen",
10 | "git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa",
11 | "description": "Hier könnte Ihre Werbung stehen"
12 | }
13 | ]
14 |
--------------------------------------------------------------------------------
/flake-info/examples/pull.sh:
--------------------------------------------------------------------------------
1 | #! /usr/bin/env bash
2 |
3 | # Run from cargo root as
4 | # $ ./examples/pull.sh
5 |
6 | echo "pulling examples in examples.txt"
7 | examples=$(cat ./examples/examples.txt)
8 | for flake in $examples; do
9 |
10 | cargo run -- --flake "$flake" | jq > examples/"$(echo "$flake" | tr "/" "-")".json
11 |
12 | done
13 |
14 | echo "pulling excamples using json file"
15 | cargo run -- --targets ./examples/examples.in.json | jq > examples/adaspark-offen.json
16 |
--------------------------------------------------------------------------------
/flake-info/src/commands/mod.rs:
--------------------------------------------------------------------------------
1 | mod nix_check_version;
2 | mod nix_flake_attrs;
3 | mod nix_flake_info;
4 | mod nixpkgs_info;
5 | pub use nix_check_version::{check_nix_version, NixCheckError};
6 | pub use nix_flake_attrs::get_derivation_info;
7 | pub use nix_flake_info::get_flake_info;
8 | pub use nixpkgs_info::{get_nixpkgs_info, get_nixpkgs_options};
9 |
10 | use lazy_static::lazy_static;
11 | use std::path::PathBuf;
12 |
13 | lazy_static! {
14 | static ref EXTRACT_SCRIPT: PathBuf = crate::DATADIR.join("commands/flake_info.nix");
15 | }
16 |
--------------------------------------------------------------------------------
/frontend/src/index.js:
--------------------------------------------------------------------------------
1 | 'use strict';
2 |
3 | require("./index.scss");
4 |
5 | const {Elm} = require('./Main');
6 |
7 | Elm.Main.init({
8 | flags: {
9 | elasticsearchMappingSchemaVersion: parseInt(process.env.ELASTICSEARCH_MAPPING_SCHEMA_VERSION),
10 | elasticsearchUrl: process.env.ELASTICSEARCH_URL || 'https://nixos-search-7-1733963800.us-east-1.bonsaisearch.net:443',
11 | elasticsearchUsername : process.env.ELASTICSEARCH_USERNAME || 'aWVSALXpZv',
12 | elasticsearchPassword : process.env.ELASTICSEARCH_PASSWORD || 'X8gPHnzL52wFEekuxsfQ9cSh',
13 | nixosChannels : JSON.parse(process.env.NIXOS_CHANNELS)
14 | }
15 | });
16 |
--------------------------------------------------------------------------------
/.github/workflows/update-flake-lock.yml:
--------------------------------------------------------------------------------
1 | name: update-flake-lock
2 | on:
3 | workflow_dispatch: # allows manual triggering
4 | schedule:
5 | - cron: '0 0 * * 0' # runs weekly on Sunday at 00:00
6 |
7 | jobs:
8 | lockfile:
9 | runs-on: ubuntu-latest
10 | steps:
11 | - name: Checkout repository
12 | uses: actions/checkout@v3
13 | - name: Install Nix
14 | uses: cachix/install-nix-action@v18
15 | with:
16 | extra_nix_config: |
17 | access-tokens = github.com=${{ secrets.GITHUB_TOKEN }}
18 | - name: Update flake.lock
19 | uses: DeterminateSystems/update-flake-lock@v14
20 |
--------------------------------------------------------------------------------
/frontend/src/Utils.elm:
--------------------------------------------------------------------------------
1 | module Utils exposing
2 | ( showHtml
3 | , toggleList
4 | )
5 |
6 | import Html.Parser
7 | import Html.Parser.Util
8 |
9 |
10 | toggleList :
11 | List a
12 | -> a
13 | -> List a
14 | toggleList list item =
15 | if List.member item list then
16 | List.filter (\x -> x /= item) list
17 |
18 | else
19 | List.append list [ item ]
20 |
21 |
22 | showHtml value =
23 | case Html.Parser.run <| String.trim value of
24 | Ok [ Html.Parser.Element "rendered-html" _ nodes ] ->
25 | Just <| Html.Parser.Util.toVirtualDom nodes
26 |
27 | _ ->
28 | Nothing
29 |
--------------------------------------------------------------------------------
/frontend/src/assets/desc-search-options.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 | NixOS options
5 | Search NixOS options by name or description.
6 | UTF-8
7 | https://nixos.org/favicon.png
8 |
9 | https://search.nixos.org/options
10 |
11 |
--------------------------------------------------------------------------------
/frontend/src/assets/desc-search-packages.xml:
--------------------------------------------------------------------------------
1 |
2 |
4 | NixOS packages
5 | Search NixOS packages by name or description.
6 | UTF-8
7 | https://nixos.org/favicon.png
8 |
9 | https://search.nixos.org/packages
10 |
11 |
--------------------------------------------------------------------------------
/.github/workflows/build-flake-info.yml:
--------------------------------------------------------------------------------
1 | name: "Build flake-info"
2 | on:
3 | pull_request:
4 | paths:
5 | - "flake.nix"
6 | - "flake.lock"
7 | - "flake-info/**"
8 |
9 | push:
10 | branches:
11 | - main
12 |
13 | jobs:
14 | build-flake-info:
15 | runs-on: ubuntu-latest
16 | steps:
17 | - name: Checking out the repository
18 | uses: actions/checkout@v3
19 |
20 | - name: Setup
21 | uses: ./.github/actions/common-setup
22 | with:
23 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
24 | CACHIX_SIGNING_KEY: ${{ secrets.CACHIX_SIGNING_KEY }}
25 |
26 |
27 |
28 | - name: Building flake-info
29 | run: |
30 | nix -vL build .#flake-info
31 |
--------------------------------------------------------------------------------
/frontend/src/Page/Home.elm:
--------------------------------------------------------------------------------
1 | module Page.Home exposing (Model, Msg, init, update, view)
2 |
3 | import Html exposing (Html, div, text)
4 | import Search exposing (NixOSChannel)
5 |
6 |
7 |
8 | -- MODEL
9 |
10 |
11 | type alias Model =
12 | ()
13 |
14 |
15 | init : ( Model, Cmd Msg )
16 | init =
17 | ( (), Cmd.none )
18 |
19 |
20 |
21 | -- UPDATE
22 |
23 |
24 | type Msg
25 | = NoOp
26 |
27 |
28 | update :
29 | Msg
30 | -> Model
31 | -> List NixOSChannel
32 | -> ( Model, Cmd Msg )
33 | update msg model _ =
34 | case msg of
35 | NoOp ->
36 | ( model, Cmd.none )
37 |
38 |
39 |
40 | -- VIEW
41 |
42 |
43 | view : Model -> Html Msg
44 | view _ =
45 | div [] [ text "Home" ]
46 |
--------------------------------------------------------------------------------
/flake-info/assets/data/fix-xrefs.lua:
--------------------------------------------------------------------------------
1 | --[[
2 | Resolve cross-references to NixOS options in a hacky way and link them to the
3 | unstable channel's option search page on search.nixos.org
4 | ]]
5 |
6 | function Link(elem)
7 | prefix = '#opt-'
8 | if elem.target:sub(1, #prefix) == prefix then
9 | option_name = elem.target:sub(#prefix + 1)
10 | option_name = option_name:gsub('%._name_%.', '..')
11 | option_name = option_name:gsub('%._%.', '.*.')
12 |
13 | elem.target = 'https://search.nixos.org/options?channel=unstable&show=' .. option_name .. '&query=' .. option_name
14 |
15 | if #elem.content == 1 and elem.content[1].tag == 'Str' and elem.content[1].text == '???' then
16 | elem.content[1].text = option_name
17 | end
18 |
19 | return elem
20 | end
21 | end
22 |
--------------------------------------------------------------------------------
/.github/actions/common-setup/action.yml:
--------------------------------------------------------------------------------
1 | name: Setup Environment
2 | inputs:
3 | GITHUB_TOKEN:
4 | required: true
5 | description: "GitHub access token used to prevent GitHub's rate limit for unauthenticated requests"
6 | CACHIX_SIGNING_KEY: # Cachix signing key used to push build artifacts to the binary cache
7 | required: true
8 | description: 'Cachix Signing Key'
9 | runs:
10 | using: "composite"
11 | steps:
12 |
13 | - name: Installing Nix
14 | uses: cachix/install-nix-action@v18
15 | with:
16 | nix_path: nixpkgs=channel:nixpkgs-unstable
17 | extra_nix_config: |
18 | access-tokens = github.com=${{ inputs.GITHUB_TOKEN }}
19 |
20 | - uses: cachix/cachix-action@v12
21 | with:
22 | name: nixos-search
23 | signingKey: '${{ inputs.CACHIX_SIGNING_KEY }}'
24 |
25 | - name: Installing jq
26 | shell: bash
27 | run: |
28 | nix-env -f '<nixpkgs>' -iA jq
29 |
--------------------------------------------------------------------------------
/flake-info/src/data/system.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 |
3 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
4 | #[serde(untagged)]
5 | pub enum System {
6 | Plain(String),
7 | Detailed { cpu: Cpu, kernel: Kernel },
8 | }
9 |
10 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
11 | pub struct Cpu {
12 | family: String,
13 | }
14 |
15 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
16 | pub struct Kernel {
17 | name: String,
18 | }
19 |
20 | impl ToString for System {
21 | fn to_string(&self) -> String {
22 | match self {
23 | System::Plain(system) => system.to_owned(),
24 | System::Detailed { cpu, kernel } => format!("{}-{}", cpu.family, kernel.name),
25 | }
26 | }
27 | }
28 |
29 | #[derive(Debug, PartialEq, Serialize, Deserialize)]
30 | pub struct InstancePlatform {
31 | system: System,
32 | version: String,
33 | }
34 |
--------------------------------------------------------------------------------
/frontend/elm.json:
--------------------------------------------------------------------------------
1 | {
2 | "type": "application",
3 | "source-directories": [
4 | "src"
5 | ],
6 | "elm-version": "0.19.1",
7 | "dependencies": {
8 | "direct": {
9 | "NoRedInk/elm-json-decode-pipeline": "1.0.0",
10 | "elm/browser": "1.0.2",
11 | "elm/core": "1.0.4",
12 | "elm/html": "1.0.0",
13 | "elm/http": "2.0.0",
14 | "elm/json": "1.1.3",
15 | "elm/regex": "1.0.0",
16 | "elm/url": "1.0.0",
17 | "hecrj/html-parser": "2.3.4",
18 | "krisajenkins/remotedata": "6.0.1",
19 | "truqu/elm-base64": "2.0.4"
20 | },
21 | "indirect": {
22 | "elm/bytes": "1.0.8",
23 | "elm/file": "1.0.5",
24 | "elm/parser": "1.1.0",
25 | "elm/time": "1.0.0",
26 | "elm/virtual-dom": "1.0.2",
27 | "rtfeldman/elm-hex": "1.0.0"
28 | }
29 | },
30 | "test-dependencies": {
31 | "direct": {
32 | "elm-explorations/test": "1.2.2"
33 | },
34 | "indirect": {
35 | "elm/random": "1.0.0",
36 | "elm/svg": "1.0.1"
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/frontend/config/webpack.dev.js:
--------------------------------------------------------------------------------
1 | const path = require('path');
2 |
3 | const {merge} = require('webpack-merge');
4 | const common = require('./webpack.common.js');
5 |
6 |
7 | const dev = {
8 | mode: 'development',
9 | devServer: {
10 | hot: "only",
11 | client: {
12 | logging: "info"
13 | },
14 | static: {directory: path.join(__dirname, "../src/assets")},
15 | devMiddleware: {
16 | publicPath: "/",
17 | stats: "errors-only"
18 | },
19 | historyApiFallback: true,
20 | // feel free to delete this section if you don't need anything like this
21 | onBeforeSetupMiddleware: function (devServer) {
22 | // on port 3000
23 | devServer.app.get("/test", function (req, res) {
24 | res.json({result: "You reached the dev server"});
25 | });
26 |
27 | }
28 | },
29 | };
30 |
31 | module.exports = env => {
32 | const withDebug = !env.nodebug;
33 | return merge(common(withDebug), dev);
34 | }
35 |
--------------------------------------------------------------------------------
/flake-info/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "flake-info"
3 | version = "0.3.0"
4 | authors = ["Yannik Sander "]
5 | edition = "2018"
6 |
7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
8 |
9 | [dependencies]
10 | clap = "^2.33"
11 | serde = {version="1.0", features = ["derive"]}
12 | serde_json = "1.0"
13 | serde_path_to_error = "0.1.5"
14 | toml = "0.5"
15 | anyhow = { version= "1.0", features = ["backtrace"] }
16 | thiserror = "1.0"
17 | structopt = "0.3"
18 | command-run = "0.13"
19 | env_logger = "0.9"
20 | log = "0.4"
21 | lazy_static = "1.4"
22 | fancy-regex = "0.6"
23 | tokio = { version = "*", features = ["full"] }
24 | reqwest = { version = "0.11", features = ["json", "blocking"] }
25 | sha2 = "0.9"
26 | pandoc = "0.8.10"
27 | semver = "1.0"
28 |
29 | elasticsearch = {git = "https://github.com/elastic/elasticsearch-rs", features = ["rustls-tls"], optional = true}
30 |
31 | [features]
32 | default = ["elastic"]
33 | elastic = ["elasticsearch"]
34 |
35 | [lib]
36 | name = "flake_info"
37 | path = "./src/lib.rs"
38 |
--------------------------------------------------------------------------------
/flake-info/default.nix:
--------------------------------------------------------------------------------
1 | { pkgs ? import <nixpkgs> {}
2 | , nixosChannels ? {}
3 | }:
4 | pkgs.rustPlatform.buildRustPackage rec {
5 | name = "flake-info";
6 | src = ./.;
7 | cargoLock = {
8 | lockFile = ./Cargo.lock;
9 | outputHashes = {
10 | "elasticsearch-8.0.0-alpha.1" = "sha256-gjmk3Q3LTAvLhzQ+k1knSp1HBwtqNiubjXNnLy/cS5M=";
11 | };
12 | };
13 | nativeBuildInputs = with pkgs; [ pkg-config ];
14 | buildInputs =
15 | with pkgs; [
16 | openssl
17 | openssl.dev
18 | makeWrapper
19 | ] ++ lib.optional pkgs.stdenv.isDarwin [
20 | libiconv
21 | darwin.apple_sdk.frameworks.Security
22 | ];
23 |
24 | checkInputs = with pkgs; [ pandoc ];
25 |
26 | ROOTDIR = builtins.placeholder "out";
27 | NIXPKGS_PANDOC_FILTERS_PATH = "${pkgs.path + "/doc/build-aux/pandoc-filters"}";
28 |
29 | checkFlags = [
30 | "--skip elastic::tests"
31 | ];
32 |
33 | postInstall = ''
34 | cp -rt "$out" assets
35 |
36 | wrapProgram $out/bin/flake-info \
37 | --set NIXOS_CHANNELS '${builtins.toJSON nixosChannels}' \
38 | --prefix PATH : ${pkgs.pandoc}/bin
39 | '';
40 | }
41 |
--------------------------------------------------------------------------------
/frontend/src/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | NixOS Search
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
--------------------------------------------------------------------------------
/frontend/default.nix:
--------------------------------------------------------------------------------
1 | { pkgs ? import <nixpkgs> { }
2 | , nixosChannels
3 | , version
4 | }:
5 | pkgs.npmlock2nix.build {
6 | src = ./.;
7 | installPhase = ''
8 | mkdir $out
9 | cp -R dist/* $out/
10 | cp netlify.toml $out/
11 | '';
12 | postConfigure = pkgs.elmPackages.fetchElmDeps {
13 | elmPackages = import ./elm-srcs.nix;
14 | elmVersion = pkgs.elmPackages.elm.version;
15 | registryDat = ./registry.dat;
16 | };
17 | ELASTICSEARCH_MAPPING_SCHEMA_VERSION = version;
18 | NIXOS_CHANNELS = builtins.toJSON nixosChannels;
19 | buildCommands = [
20 | "HOME=$PWD npm run prod"
21 | ];
22 | buildInputs =
23 | (with pkgs; [
24 | nodejs
25 | elm2nix
26 | ]) ++
27 | (with pkgs.elmPackages; [
28 | elm
29 | elm-format
30 | elm-language-server
31 | elm-test
32 | ]);
33 | node_modules_attrs = {
34 | sourceOverrides = {
35 | elm = sourceInfo: drv: drv.overrideAttrs (old: {
36 | postPatch = ''
37 | sed -i -e "s|download(|//download(|" install.js
38 | sed -i -e "s|request(|//request(|" download.js
39 | sed -i -e "s|var version|return; var version|" download.js
40 | cp ${pkgs.elmPackages.elm}/bin/elm bin/elm
41 | '';
42 | });
43 | };
44 | };
45 | }
46 |
--------------------------------------------------------------------------------
/flake-info/examples/github:W95Psp-LiterateFStar.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "flake_description": "LiterateFStar",
4 | "flake_resolved": {
5 | "type": "github",
6 | "owner": "W95Psp",
7 | "repo": "LiterateFStar"
8 | },
9 | "flake_name": "LiterateFStar",
10 | "flake_source": {
11 | "type": "git",
12 | "url": "github:W95Psp/LiterateFStar"
13 | },
14 | "package_attr_name": "fstar",
15 | "package_pname": "fstar-c671957efe8769b8fc421cd3e9da47b3fa57d510",
16 | "package_pversion": "",
17 | "package_platforms": [
18 | "x86_64-linux",
19 | "x86_64-darwin"
20 | ],
21 | "package_outputs": [
22 | "out"
23 | ],
24 | "package_license": {}
25 | },
26 | {
27 | "flake_description": "LiterateFStar",
28 | "flake_resolved": {
29 | "type": "github",
30 | "owner": "W95Psp",
31 | "repo": "LiterateFStar"
32 | },
33 | "flake_name": "LiterateFStar",
34 | "flake_source": {
35 | "type": "git",
36 | "url": "github:W95Psp/LiterateFStar"
37 | },
38 | "app_bin": "/nix/store/mwwn9wzbgkdfac4ijj176akbkr9bxk5k-build",
39 | "app_attr_name": "build",
40 | "app_platforms": [
41 | "x86_64-linux",
42 | "x86_64-darwin"
43 | ],
44 | "app_type": "derivation"
45 | }
46 | ]
47 |
--------------------------------------------------------------------------------
/flake-info/examples/github:serokell-deploy-rs.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "flake_description": "A Simple multi-profile Nix-flake deploy tool.",
4 | "flake_resolved": {
5 | "type": "github",
6 | "owner": "serokell",
7 | "repo": "deploy-rs"
8 | },
9 | "flake_name": "deploy-rs",
10 | "flake_source": {
11 | "type": "git",
12 | "url": "github:serokell/deploy-rs"
13 | },
14 | "package_attr_name": "deploy-rs",
15 | "package_pname": "deploy-rs-0.1.0",
16 | "package_pversion": "0.1.0",
17 | "package_platforms": [
18 | "x86_64-linux",
19 | "x86_64-darwin",
20 | "i686-linux",
21 | "aarch64-linux"
22 | ],
23 | "package_outputs": [
24 | "out"
25 | ],
26 | "package_license": {}
27 | },
28 | {
29 | "flake_description": "A Simple multi-profile Nix-flake deploy tool.",
30 | "flake_resolved": {
31 | "type": "github",
32 | "owner": "serokell",
33 | "repo": "deploy-rs"
34 | },
35 | "flake_name": "deploy-rs",
36 | "flake_source": {
37 | "type": "git",
38 | "url": "github:serokell/deploy-rs"
39 | },
40 | "app_bin": "/nix/store/lw8c19dkrr8a766qbl89nsfwbgwhp43q-deploy-rs-0.1.0/bin/deploy",
41 | "app_attr_name": "deploy-rs",
42 | "app_platforms": [
43 | "x86_64-linux",
44 | "x86_64-darwin",
45 | "i686-linux",
46 | "aarch64-linux"
47 | ],
48 | "app_type": "app"
49 | }
50 | ]
51 |
--------------------------------------------------------------------------------
/flake-info/examples/.-..json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "flake_description": "Extracting information from flakes",
4 | "flake_resolved": {
5 | "type": "git",
6 | "url": "file:///Volumes/projects/Uni/courses/kth/DD2476-Search-Engines-and-Information-Retrieval-Systems/project?dir=flake-info"
7 | },
8 | "flake_name": "",
9 | "flake_source": {
10 | "type": "git",
11 | "url": "./."
12 | },
13 | "package_attr_name": "flake-info",
14 | "package_pname": "flake-info",
15 | "package_pversion": "",
16 | "package_platforms": [
17 | "x86_64-linux",
18 | "x86_64-darwin",
19 | "i686-linux",
20 | "aarch64-linux"
21 | ],
22 | "package_outputs": [
23 | "out"
24 | ],
25 | "package_license": {}
26 | },
27 | {
28 | "flake_description": "Extracting information from flakes",
29 | "flake_resolved": {
30 | "type": "git",
31 | "url": "file:///Volumes/projects/Uni/courses/kth/DD2476-Search-Engines-and-Information-Retrieval-Systems/project?dir=flake-info"
32 | },
33 | "flake_name": "",
34 | "flake_source": {
35 | "type": "git",
36 | "url": "./."
37 | },
38 | "app_bin": "/nix/store/4akx0is6fgh9ci2ak5sbskwzykr0xj85-flake-info/bin/flake-info",
39 | "app_attr_name": "flake-info",
40 | "app_platforms": [
41 | "x86_64-linux",
42 | "x86_64-darwin",
43 | "i686-linux",
44 | "aarch64-linux"
45 | ],
46 | "app_type": "app"
47 | }
48 | ]
49 |
--------------------------------------------------------------------------------
/flake-info/src/commands/nix_check_version.rs:
--------------------------------------------------------------------------------
1 | use command_run::Command;
2 | use log::info;
3 | use semver::{Version, VersionReq};
4 | use thiserror::Error;
5 |
6 | #[derive(Debug, Error)]
7 | pub enum NixCheckError {
8 | #[error("Installed nix doesn't match version requirement: {0} (required {1})")]
9 | IncompatibleNixVersion(Version, VersionReq),
10 |
11 | #[error("SemVer error (this should not occur, please file a bug report): {0}")]
12 | CheckError(#[from] semver::Error),
13 |
14 | #[error("Failed to run nix command: {0}")]
15 | CommandError(#[from] command_run::Error),
16 | }
17 |
18 | pub fn check_nix_version(min_version: &str) -> Result<(), NixCheckError> {
19 | info!("Checking nix version");
20 |
21 | let nix_version_requirement = VersionReq::parse(&format!(">={}", min_version))?;
22 |
23 | let mut command =
24 | Command::with_args("nix", &["eval", "--raw", "--expr", "builtins.nixVersion"]);
25 | command.log_command = false;
26 | command.enable_capture();
27 | let output = command.run()?;
28 | let nix_version = Version::parse(
29 | output
30 | .stdout_string_lossy()
31 | .split(|c: char| c != '.' && !c.is_ascii_digit())
32 | .next()
33 | .unwrap(),
34 | )?;
35 | if !nix_version_requirement.matches(&nix_version) {
36 | return Err(NixCheckError::IncompatibleNixVersion(
37 | nix_version,
38 | nix_version_requirement,
39 | ));
40 | }
41 | Ok(())
42 | }
43 |
--------------------------------------------------------------------------------
/.github/workflows/frontend.yml:
--------------------------------------------------------------------------------
1 | name: "Frontend: Build & Deploy to Netlify"
2 | on:
3 | pull_request:
4 | paths:
5 | - "flake.nix"
6 | - "flake.lock"
7 | - "frontend/**"
8 | - "VERSION"
9 |
10 | push:
11 | branches:
12 | - main
13 |
14 | jobs:
15 | build-and-deploy:
16 | runs-on: ubuntu-latest
17 | steps:
18 | - name: Checking out the repository
19 | uses: actions/checkout@v3
20 |
21 | - name: Setup
22 | uses: ./.github/actions/common-setup
23 | with:
24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25 | CACHIX_SIGNING_KEY: ${{ secrets.CACHIX_SIGNING_KEY }}
26 |
27 |
28 |
29 | - name: Building search.nixos.org
30 | run: |
31 | nix -vL build .#frontend
32 | mkdir ./dist
33 | cp -RL ./result/* ./dist/
34 |
35 | - name: Deploy to Netlify
36 | uses: nwtgck/actions-netlify@v1.2
37 | env:
38 | NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
39 | NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
40 | with:
41 | production-branch: 'main'
42 | production-deploy: ${{ github.event_name == 'push' }}
43 | publish-dir: './dist'
44 | github-token: ${{ secrets.GITHUB_TOKEN }}
45 | deploy-message: 'Deploy from GitHub Actions'
46 | enable-pull-request-comment: true
47 | overwrites-pull-request-comment: true
48 | enable-commit-comment: false
49 | enable-commit-status: true
50 | if: github.repository_owner == 'NixOS'
51 |
--------------------------------------------------------------------------------
/flake-info/examples/github:ngi-nix-openpgp-ca.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "flake_description": "OpenPGP CA is a tool for managing OpenPGP keys within an organization.",
4 | "flake_resolved": {
5 | "type": "github",
6 | "owner": "ngi-nix",
7 | "repo": "openpgp-ca"
8 | },
9 | "flake_name": "openpgp-ca",
10 | "flake_source": {
11 | "type": "git",
12 | "url": "github:ngi-nix/openpgp-ca"
13 | },
14 | "package_attr_name": "openpgp-ca",
15 | "package_pname": "openpgp-ca",
16 | "package_pversion": "20200717",
17 | "package_platforms": [
18 | "x86_64-linux",
19 | "x86_64-darwin"
20 | ],
21 | "package_outputs": [
22 | "out"
23 | ],
24 | "package_description": "OpenPGP CA is a tool for managing OpenPGP keys within an organization.",
25 | "package_license": {}
26 | },
27 | {
28 | "flake_description": "OpenPGP CA is a tool for managing OpenPGP keys within an organization.",
29 | "flake_resolved": {
30 | "type": "github",
31 | "owner": "ngi-nix",
32 | "repo": "openpgp-ca"
33 | },
34 | "flake_name": "openpgp-ca",
35 | "flake_source": {
36 | "type": "git",
37 | "url": "github:ngi-nix/openpgp-ca"
38 | },
39 | "package_attr_name": "openpgp-ca-docker",
40 | "package_pname": "docker-image-openpgp-ca.tar.gz",
41 | "package_pversion": "",
42 | "package_platforms": [
43 | "x86_64-linux"
44 | ],
45 | "package_outputs": [
46 | "out"
47 | ],
48 | "package_license": {}
49 | }
50 | ]
51 |
--------------------------------------------------------------------------------
/flake-info/src/commands/nix_flake_info.rs:
--------------------------------------------------------------------------------
1 | use anyhow::{Context, Result};
2 | use command_run::{Command, LogTo};
3 | use std::fmt::Display;
4 | use std::path::PathBuf;
5 |
6 | use crate::data::Flake;
7 |
8 | /// Uses `nix` to fetch the provided flake and read general information
9 | /// about it using `nix flake metadata`
10 | pub fn get_flake_info<T: AsRef<str> + Display>(
11 | flake_ref: T,
12 | temp_store: bool,
13 | extra: &[String],
14 | ) -> Result<Flake> {
15 | let args = ["flake", "metadata", "--json", "--no-write-lock-file"];
16 | let mut command = Command::with_args("nix", args);
17 | command.add_arg(flake_ref.as_ref());
18 | if temp_store {
19 | let temp_store_path = PathBuf::from("/tmp/flake-info-store");
20 | if !temp_store_path.exists() {
21 | std::fs::create_dir_all(&temp_store_path)
22 | .with_context(|| "Couldn't create temporary store path")?;
23 | }
24 | command.add_arg_pair("--store", temp_store_path.canonicalize()?);
25 | }
26 | command.add_args(extra);
27 | command.enable_capture();
28 | command.log_to = LogTo::Log;
29 | command.log_output_on_error = true;
30 |
31 | command
32 | .run()
33 | .with_context(|| format!("Failed to gather information about {}", flake_ref))
34 | .and_then(|o| {
35 | let deserialized: Result<Flake, serde_json::Error> =
36 | serde_json::de::from_str(o.stdout_string_lossy().to_string().as_str());
37 | Ok(deserialized?.resolve_name())
38 | })
39 | }
40 |
--------------------------------------------------------------------------------
/.github/workflows/check-flake-files.yml:
--------------------------------------------------------------------------------
# CI job: whenever a flake group definition under flakes/ changes,
# run flake-info against every group and surface import failures as
# PR annotations.
name: "Check Flake Groups"

on:
  workflow_dispatch:
  pull_request:
    paths:
      - "flakes/**.toml"

jobs:
  automatic-custom-flakes-check:
    runs-on: ubuntu-latest

    strategy:
      fail-fast: true

    env:
      RUST_LOG: debug

    steps:

      - name: Checking out the repository
        uses: actions/checkout@v3

      - name: Setup
        uses: ./.github/actions/common-setup
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CACHIX_SIGNING_KEY: ${{ secrets.CACHIX_SIGNING_KEY }}

      - name: Try importing all custom flakes
        run: |
          # ** globbing so nested group files are picked up too
          shopt -s globstar

          had_error=0

          for flake_group in flakes/**/*.toml
          do
            echo "::group::Group \"$(basename $flake_group .toml)\""

            # --report writes failing flakes to ./report.txt instead of aborting
            nix run .#flake-info -- group "$flake_group" "$(basename "$flake_group" .toml)" --report

            if [[ -f "./report.txt" ]]
            then
              had_error=1

              # sic.:
              # Workaround for multi line output
              report="$(< ./report.txt)"
              report="${report//'%'/'%25'}"
              report="${report//$'\n'/'%0A'}"
              report="${report//$'\r'/'%0D'}"

              echo "::error file=$flake_group::$report"
            fi

            echo ::endgroup::

          done
          exit $had_error
60 |
--------------------------------------------------------------------------------
/frontend/tests/Example.elm:
--------------------------------------------------------------------------------
1 | module Example exposing (fuzzTest, unitTest, viewTest)
2 |
3 | import Expect exposing (Expectation)
4 | import Fuzz exposing (Fuzzer, int, list, string)
5 | import Main exposing (..)
6 | import Test exposing (..)
7 | import Test.Html.Query as Query
8 | import Test.Html.Selector exposing (tag, text)
9 |
10 |
{-| Simple unit test of `update`: the Inc message increments the counter.
(The original "See ..." link was lost in extraction — presumably it
pointed at the elm-explorations/test documentation; confirm.)
-}
unitTest : Test
unitTest =
    describe "simple unit test"
        [ test "Inc adds one" <|
            \() ->
                update Inc (Model 0 "")
                    |> Tuple.first
                    |> .counter
                    |> Expect.equal 1
        ]
23 |
24 |
{-| Fuzz test: Inc adds one regardless of the starting counter value.
(The original "See ..." link was lost in extraction — presumably it
pointed at the elm-explorations/test documentation; confirm.)
-}
fuzzTest : Test
fuzzTest =
    describe "simple fuzz test"
        [ fuzz int "Inc ALWAYS adds one" <|
            \ct ->
                update Inc (Model ct "")
                    |> Tuple.first
                    |> .counter
                    |> Expect.equal (ct + 1)
        ]
37 |
38 |
{-| View test: the first button rendered by `view` carries the "+ 1" label.
(The original "see ..." link was lost in extraction — presumably it
pointed at the Test.Html.Query documentation; confirm.)
-}
viewTest : Test
viewTest =
    describe "Testing view function"
        [ test "Button has the expected text" <|
            \() ->
                Model 0 ""
                    |> view
                    |> Query.fromHtml
                    |> Query.findAll [ tag "button" ]
                    |> Query.first
                    |> Query.has [ text "+ 1" ]
        ]
53 |
--------------------------------------------------------------------------------
/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "nixos-search",
3 | "version": "1.0.0",
4 | "description": "Search NixOS packages and options.",
5 | "author": "Rok Garbas ",
6 | "license": "MIT",
7 | "repository": {
8 | "type": "git",
9 | "url": "https://github.com/NixOS/nixos-search"
10 | },
11 | "main": "index.js",
12 | "scripts": {
13 | "analyse": "elm-analyse -s -p 3001 -o",
14 | "build": "webpack",
15 | "dev": "webpack serve --port 3000 --config config/webpack.dev.js",
16 | "nodebug": "webpack serve --port 3000 --config config/webpack.dev.js --env nodebug",
17 | "prod": "NODE_ENV=production webpack --config config/webpack.prod.js",
18 | "start": "npm run dev",
19 | "test": "elm-test"
20 | },
21 | "devDependencies": {
22 | "@babel/core": "^7.16.5",
23 | "@babel/preset-env": "^7.16.5",
24 | "autoprefixer": "^10.4.0",
25 | "babel-loader": "^8.2.3",
26 | "clean-webpack-plugin": "^4.0.0",
27 | "copy-webpack-plugin": "^10.2.0",
28 | "css-loader": "^4.3.0",
29 | "css-minimizer-webpack-plugin": "^3.3.1",
30 | "elm": "^0.19.1-5",
31 | "elm-reloader": "^1.0.0",
32 | "elm-test": "^0.19.1-revision9",
33 | "elm-webpack-loader": "^8.0.0",
34 | "file-loader": "^6.2.0",
35 | "html-webpack-plugin": "^5.5.0",
36 | "mini-css-extract-plugin": "^2.4.5",
37 | "postcss": "^8.4.4",
38 | "postcss-cli": "^9.0.2",
39 | "postcss-loader": "^6.2.1",
40 | "resolve-url-loader": "^4.0.0",
41 | "sass": "^1.43.5",
42 | "sass-loader": "^12.3.0",
43 | "style-loader": "^3.3.1",
44 | "terser-webpack-plugin": "^5.3.0",
45 | "url-loader": "^4.1.1",
46 | "webpack": "^5.64.4",
47 | "webpack-cli": "^4.9.1",
48 | "webpack-dev-server": "^4.6.0",
49 | "webpack-merge": "^5.8.0"
50 | },
51 | "engines": {
52 | "node": ">=12"
53 | },
54 | "prettier": {
55 | "tabWidth": 4
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/flake-info/src/lib.rs:
--------------------------------------------------------------------------------
1 | #![recursion_limit = "256"]
2 |
3 | use anyhow::Result;
4 | use data::{import::Kind, Export, Flake, Source};
5 | use lazy_static::lazy_static;
6 | use std::path::{Path, PathBuf};
7 |
8 | pub mod commands;
9 | pub mod data;
10 |
11 | #[cfg(feature = "elastic")]
12 | pub mod elastic;
13 |
14 | pub use commands::get_flake_info;
15 | use log::trace;
16 |
lazy_static! {
    // Directory holding the bundled assets (extraction scripts etc.).
    // Overridable at build time via the ROOTDIR env var; falls back to
    // the crate root (CARGO_MANIFEST_DIR).
    static ref DATADIR: PathBuf =
        Path::new(option_env!("ROOTDIR").unwrap_or(env!("CARGO_MANIFEST_DIR"))).join("assets");
}
21 |
22 | pub fn process_flake(
23 | source: &Source,
24 | kind: &data::import::Kind,
25 | temp_store: bool,
26 | extra: &[String],
27 | ) -> Result<(Flake, Vec)> {
28 | let mut info = commands::get_flake_info(source.to_flake_ref(), temp_store, extra)?;
29 | info.source = Some(source.clone());
30 | let packages = commands::get_derivation_info(source.to_flake_ref(), *kind, temp_store, extra)?;
31 | trace!("flake info: {:#?}", info);
32 | trace!("flake content: {:#?}", packages);
33 |
34 | let exports: Vec = packages
35 | .into_iter()
36 | .map(|p| Export::flake(info.clone(), p))
37 | .collect::>>()?;
38 |
39 | Ok((info, exports))
40 | }
41 |
42 | pub fn process_nixpkgs(nixpkgs: &Source, kind: &Kind) -> Result, anyhow::Error> {
43 | let drvs = if matches!(kind, Kind::All | Kind::Package) {
44 | commands::get_nixpkgs_info(nixpkgs.to_flake_ref())?
45 | } else {
46 | Vec::new()
47 | };
48 |
49 | let mut options = if matches!(kind, Kind::All | Kind::Option) {
50 | commands::get_nixpkgs_options(nixpkgs.to_flake_ref())?
51 | } else {
52 | Vec::new()
53 | };
54 |
55 | let mut all = drvs;
56 | all.append(&mut options);
57 |
58 | let exports = all
59 | .into_iter()
60 | .map(Export::nixpkgs)
61 | .collect::>>()?;
62 | Ok(exports)
63 | }
64 |
--------------------------------------------------------------------------------
/frontend/config/webpack.prod.js:
--------------------------------------------------------------------------------
// Production webpack configuration: extends the common config with
// minification, CSS extraction, and static-asset copying.
const {merge} = require('webpack-merge');

const CopyWebpackPlugin = require("copy-webpack-plugin");
// JS minification
const TerserPlugin = require("terser-webpack-plugin");
// Production CSS assets - separate, minimised file
const MiniCssExtractPlugin = require("mini-css-extract-plugin");
const CssMinimizerPlugin = require("css-minimizer-webpack-plugin");

const common = require('./webpack.common.js');

const prod = {
    mode: 'production',
    optimization: {
        minimize: true,
        minimizer: [
            new TerserPlugin(),
            new CssMinimizerPlugin(),
        ]
    },
    plugins: [
        // Copy static assets
        new CopyWebpackPlugin({
            patterns: [{from: "src/assets"}]
        }),
        new MiniCssExtractPlugin({
            // Options similar to the same options in webpackOptions.output
            filename: "[name]-[chunkhash].css"
        })
    ],
    module: {
        rules: [
            {
                test: /\.elm$/,
                use: {
                    loader: "elm-webpack-loader",
                    options: {
                        // Enable Elm's production optimizations (no Debug usage allowed)
                        optimize: true
                    }
                }
            },
            {
                test: /\.(sa|sc|c)ss$/i,
                use: [
                    // Extract CSS to a file instead of injecting <style> tags
                    MiniCssExtractPlugin.loader,
                    "css-loader",
                    {
                        loader: "postcss-loader",
                        options: {
                            postcssOptions: {
                                plugins: [
                                    require("autoprefixer"),
                                ],
                            },
                        }
                    }, "sass-loader"
                ]
            }
        ]
    }

};

// common(false): build without Elm's debug overlay.
module.exports = merge(common(false), prod);
65 |
--------------------------------------------------------------------------------
/flake-info/src/commands/nix_flake_attrs.rs:
--------------------------------------------------------------------------------
1 | use crate::data::import::{FlakeEntry, Kind};
2 | use anyhow::{Context, Result};
3 | use command_run::{Command, LogTo};
4 | use serde_json::Deserializer;
5 | use std::fmt::Display;
6 | use std::path::PathBuf;
7 |
// Base `nix eval` invocation used to evaluate the extraction script:
// JSON output, no lock-file writes, and no import-from-derivation.
const ARGS: [&str; 4] = [
    "eval",
    "--json",
    "--no-allow-import-from-derivation",
    "--no-write-lock-file",
];
14 |
15 | /// Uses `nix` to fetch the provided flake and read general information
16 | /// about it using `nix flake info`
17 | pub fn get_derivation_info + Display>(
18 | flake_ref: T,
19 | kind: Kind,
20 | temp_store: bool,
21 | extra: &[String],
22 | ) -> Result> {
23 | let mut command = Command::with_args("nix", ARGS.iter());
24 | command.add_arg_pair("-f", super::EXTRACT_SCRIPT.clone());
25 | command.add_arg_pair("-I", "nixpkgs=channel:nixpkgs-unstable");
26 | command.add_args(["--override-flake", "input-flake", flake_ref.as_ref()].iter());
27 | command.add_args(["--argstr", "flake", flake_ref.as_ref()].iter());
28 | command.add_arg(kind.as_ref());
29 | if temp_store {
30 | let temp_store_path = PathBuf::from("/tmp/flake-info-store");
31 | if !temp_store_path.exists() {
32 | std::fs::create_dir_all(&temp_store_path)
33 | .with_context(|| "Couldn't create temporary store path")?;
34 | }
35 | command.add_arg_pair("--store", temp_store_path.canonicalize()?);
36 | }
37 | command.add_args(extra);
38 | command.enable_capture();
39 | command.log_to = LogTo::Log;
40 | command.log_output_on_error = true;
41 |
42 | let parsed: Result> = command
43 | .run()
44 | .with_context(|| format!("Failed to gather information about {}", flake_ref))
45 | .and_then(|o| {
46 | let output = &*o.stdout_string_lossy();
47 | let de = &mut Deserializer::from_str(output);
48 | serde_path_to_error::deserialize(de)
49 | .with_context(|| format!("Failed to analyze flake {}", flake_ref))
50 | });
51 | parsed
52 | }
53 |
--------------------------------------------------------------------------------
/flake.lock:
--------------------------------------------------------------------------------
1 | {
2 | "nodes": {
3 | "flake-utils": {
4 | "locked": {
5 | "lastModified": 1667395993,
6 | "narHash": "sha256-nuEHfE/LcWyuSWnS8t12N1wc105Qtau+/OdUAjtQ0rA=",
7 | "owner": "numtide",
8 | "repo": "flake-utils",
9 | "rev": "5aed5285a952e0b949eb3ba02c12fa4fcfef535f",
10 | "type": "github"
11 | },
12 | "original": {
13 | "owner": "numtide",
14 | "repo": "flake-utils",
15 | "type": "github"
16 | }
17 | },
18 | "nixos-org-configurations": {
19 | "flake": false,
20 | "locked": {
21 | "lastModified": 1666806338,
22 | "narHash": "sha256-Q0uLdIJAMi1sC0bNOKoPJk39hMMkves1rtEsFDAZZ5o=",
23 | "owner": "NixOS",
24 | "repo": "nixos-org-configurations",
25 | "rev": "cebfd15c30724cadacf85b5fd950dc1070c4eb7d",
26 | "type": "github"
27 | },
28 | "original": {
29 | "owner": "NixOS",
30 | "repo": "nixos-org-configurations",
31 | "type": "github"
32 | }
33 | },
34 | "nixpkgs": {
35 | "locked": {
36 | "lastModified": 1667231093,
37 | "narHash": "sha256-RERXruzBEBuf0c7OfZeX1hxEKB+PTCUNxWeB6C1jd8Y=",
38 | "owner": "NixOS",
39 | "repo": "nixpkgs",
40 | "rev": "d40fea9aeb8840fea0d377baa4b38e39b9582458",
41 | "type": "github"
42 | },
43 | "original": {
44 | "id": "nixpkgs",
45 | "ref": "nixos-unstable",
46 | "type": "indirect"
47 | }
48 | },
49 | "npmlock2nix": {
50 | "flake": false,
51 | "locked": {
52 | "lastModified": 1666460237,
53 | "narHash": "sha256-HME6rnysvCwUVtH+BDWDGahmweMaLgD2wqHeRuGp6QI=",
54 | "owner": "nix-community",
55 | "repo": "npmlock2nix",
56 | "rev": "eeed152290ec2425f96c5e74e469c40b621e1468",
57 | "type": "github"
58 | },
59 | "original": {
60 | "owner": "nix-community",
61 | "repo": "npmlock2nix",
62 | "type": "github"
63 | }
64 | },
65 | "root": {
66 | "inputs": {
67 | "flake-utils": "flake-utils",
68 | "nixos-org-configurations": "nixos-org-configurations",
69 | "nixpkgs": "nixpkgs",
70 | "npmlock2nix": "npmlock2nix"
71 | }
72 | }
73 | },
74 | "root": "root",
75 | "version": 7
76 | }
77 |
--------------------------------------------------------------------------------
/frontend/src/Route/SearchQuery.elm:
--------------------------------------------------------------------------------
1 | module Route.SearchQuery exposing
2 | ( RawQuery
3 | , SearchQuery
4 | , absolute
5 | , searchQueryToString
6 | , searchString
7 | , toRawQuery
8 | , toSearchQuery
9 | )
10 |
11 | import Dict exposing (Dict)
12 | import Url
13 | import Url.Builder
14 |
15 |
16 |
17 | -- RawQuery
18 |
19 |
20 | type RawQuery
21 | = RawQuery (Dict String String)
22 |
23 |
{-| Split `str` on the first occurrence of `sep` into a (key, value)
pair; everything after the first separator is kept as the value, so
`chunk "=" "a=b=c"` yields `( "a", "b=c" )`. A string without the
separator yields an empty value.
-}
chunk : String -> String -> Maybe ( String, String )
chunk sep str =
    case String.split sep str of
        [] ->
            Nothing

        [ key ] ->
            Just ( key, "" )

        key :: xs ->
            Just ( key, String.join sep xs )
35 |
36 |
{-| Parse the raw query string of a URL into a key/value dictionary,
splitting on `&` and then on the first `=` of each chunk. Returns
Nothing when the URL carries no query string at all.
-}
toRawQuery : Url.Url -> Maybe RawQuery
toRawQuery =
    Maybe.map (RawQuery << Dict.fromList << List.filterMap (chunk "=") << String.split "&")
        << .query
41 |
42 |
43 |
44 | -- SearchQuery
45 |
46 |
47 | {-| This is type safe wrapper for working with search queries in url
48 | -}
49 | type SearchQuery
50 | = SearchQuery String
51 |
52 |
{-| Look up the named parameter in a raw query, keeping its (still
URL-encoded) value wrapped as a SearchQuery.
-}
searchString : String -> RawQuery -> Maybe SearchQuery
searchString name (RawQuery dict) =
    Maybe.map SearchQuery <| Dict.get name dict
56 |
57 |
{-| Decode a SearchQuery back to plain text. `+` is treated as an
encoded space (form-encoding convention) before percent-decoding.
-}
searchQueryToString : SearchQuery -> Maybe String
searchQueryToString (SearchQuery str) =
    Url.percentDecode <| String.replace "+" "%20" str
61 |
62 |
{-| Encode plain text as a SearchQuery, percent-encoding it and then
representing spaces as `+` (inverse of `searchQueryToString`).
-}
toSearchQuery : String -> SearchQuery
toSearchQuery query =
    SearchQuery <| String.replace "%20" "+" <| Url.percentEncode query
66 |
67 |
{-| Build absolute URL with support for search query strings.

The search parameters are appended after the regular query parameters,
with `?` or `&` chosen depending on whether a regular query is present.
When there are no search parameters at all, the URL is returned
unchanged (previously a dangling `?`/`&` was appended).

-}
absolute : List String -> List Url.Builder.QueryParameter -> List ( String, SearchQuery ) -> String
absolute path query searchQuery =
    let
        searchStrings =
            List.map (\( name, SearchQuery val ) -> name ++ "=" ++ val) searchQuery
                |> String.join "&"

        url =
            Url.Builder.absolute path query
    in
    if String.isEmpty searchStrings then
        url

    else
        case query of
            [] ->
                url ++ "?" ++ searchStrings

            _ ->
                url ++ "&" ++ searchStrings
88 |
--------------------------------------------------------------------------------
/flakes/manual.toml:
--------------------------------------------------------------------------------
1 | [[sources]]
2 | type = "github"
3 | owner = "NixOS"
4 | repo = "hydra"
5 |
6 | [[sources]]
7 | type = "github"
8 | owner = "ngi-nix"
9 | repo = "offen"
10 |
11 | [[sources]]
12 | type = "github"
13 | owner = "ngi-nix"
14 | repo = "pixelfed"
15 |
16 | [[sources]]
17 | type = "github"
18 | owner = "ngi-nix"
19 | repo = "lightmeter"
20 |
21 | [[sources]]
22 | type = "github"
23 | owner = "ngi-nix"
24 | repo = "openpgp-ca"
25 |
26 | [[sources]]
27 | type = "github"
28 | owner = "ngi-nix"
29 | repo = "weblate"
30 |
31 | [[sources]]
32 | type = "github"
33 | owner = "fort-nix"
34 | repo = "nix-bitcoin"
35 |
36 | [[sources]]
37 | type = "gitlab"
38 | owner = "pi-lar"
39 | repo = "neuropil"
40 |
41 | [[sources]]
42 | type = "github"
43 | owner = "tweag"
44 | repo = "nickel"
45 |
46 | [[sources]]
47 | type = "github"
48 | owner = "pinpox"
49 | repo = "nixos"
50 |
51 | [[sources]]
52 | type = "github"
53 | owner = "Mic92"
54 | repo = "sops-nix"
55 |
56 | [[sources]]
57 | type = "github"
58 | owner = "PaddiM8"
59 | repo = "kalker"
60 |
[[sources]]
62 | type = "github"
63 | owner = "pta2002"
64 | repo = "nixvim"
65 |
66 | [[sources]]
67 | type = "github"
68 | owner = "yusdacra"
69 | repo = "nix-cargo-integration"
70 |
71 | [[sources]]
72 | type = "github"
73 | owner = "yusdacra"
74 | repo = "rust-nix-templater"
75 |
76 | [[sources]]
77 | type = "github"
78 | owner = "srid"
79 | repo = "emanote"
80 |
81 | [[sources]]
82 | type = "git"
83 | url = "git+https://git.sr.ht/~kerstin/sway-timetracker?ref=main"
84 |
85 | [[sources]]
86 | type = "github"
87 | owner = "PolyMC"
88 | repo = "PolyMC"
89 |
90 | [[sources]]
91 | type = "gitlab"
92 | owner = "simple-nixos-mailserver"
93 | repo = "nixos-mailserver"
94 |
95 | [[sources]]
96 | type = "git"
97 | url = "git+https://codeberg.org/wolfangaukang/stream-alert-bot?ref=main"
98 |
99 | [[sources]]
100 | type = "git"
101 | url = "git+https://codeberg.org/wolfangaukang/python-trovo?ref=main"
102 |
103 | [[sources]]
104 | type = "git"
105 | url = "git+https://codeberg.org/wolfangaukang/multifirefox?ref=main"
106 |
107 | [[sources]]
108 | type = "github"
109 | owner = "juliosueiras-nix"
110 | repo = "nix-security"
111 |
112 | [[sources]]
113 | type = "github"
114 | owner = "astro"
115 | repo = "microvm.nix"
116 |
117 | [[sources]]
118 | type = "github"
119 | owner = "input-output-hk"
120 | repo = "haskell.nix"
121 |
122 | [[sources]]
123 | type = "sourcehut"
124 | owner = "~munksgaard"
125 | repo = "geomyidae-flake"
126 |
--------------------------------------------------------------------------------
/frontend/config/webpack.common.js:
--------------------------------------------------------------------------------
1 |
2 | const path = require('path');
3 |
4 | const webpack = require("webpack");
5 | const HtmlWebpackPlugin = require('html-webpack-plugin');
6 | const {CleanWebpackPlugin} = require('clean-webpack-plugin');
7 |
8 |
// Shared webpack configuration factory.
// `withDebug` toggles Elm's time-travelling debug overlay (used by the
// dev config; the prod config passes false).
module.exports = (withDebug) => {
    return {
        entry: './src/index.js',
        output: {
            path: path.resolve(__dirname, '../dist'),
            filename: 'bundle.js'
        },
        resolve: {
            modules: [path.join(__dirname, "../src"), 'node_modules'],
            extensions: [".elm", ".js"]
        },
        plugins: [
            new HtmlWebpackPlugin({
                template: "./src/index.html"
            }),
            new CleanWebpackPlugin(),
            // Fail the build if these env vars are missing at compile time.
            new webpack.EnvironmentPlugin([
                "ELASTICSEARCH_MAPPING_SCHEMA_VERSION",
                "NIXOS_CHANNELS"
            ]),
            new webpack.DefinePlugin({
                'process.env': JSON.stringify(process.env)
            }),
        ],
        optimization: {
            // Prevents compilation errors causing the hot loader to lose state
            emitOnErrors: false
        },
        module: {
            rules: [
                {
                    test: /\.elm$/,
                    use: [
                        {loader: "elm-reloader"},
                        {
                            loader: "elm-webpack-loader",
                            options: {
                                // add Elm's debug overlay to output
                                debug: withDebug,
                                optimize: false
                            }
                        }
                    ]
                }, {
                    test: /\.(sa|sc|c)ss$/i,
                    use: ['style-loader', 'css-loader', {
                        loader: "postcss-loader",
                        options: {
                            postcssOptions: {
                                plugins: [
                                    require("autoprefixer"),
                                ],
                            },
                        }
                    }, "sass-loader"],
                }, {
                    test: /\.js$/,
                    exclude: /node_modules/,
                    use: {
                        loader: "babel-loader"
                    }
                },
                {
                    test: /\.(png|svg|jpg|jpeg|gif)$/i,
                    type: 'asset/resource',
                },
            ],
        }
    };
};
79 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # search.nixos.org
2 |
3 | This repository contains the scripts and the web application for
4 | `search.nixos.org`.
5 |
6 |
7 | ## How this project came to be
8 |
The initial idea was to replace the NixOS packages and options search, which
fetched a single JSON file containing all packages (or options). This approach
is good for its simple setup, but it started to show its problems as the number
of packages grew bigger and bigger. I'm sure we could have optimized it
further, but the ideas of what we could do with a database in the back end were
too tempting not to try.
15 |
16 | For backend we are using Elasticsearch instance which is kindly sponsored by
17 | [bonsai.io](https://bonsai.io). On the frontend we are using
18 | [Elm](https://elm-lang.org).
19 |
20 |
21 | ## How search works?
22 |
The use case we want to solve is that a visitor wants to see whether a package
exists or to look up a certain package's details.
25 |
A user wants to converge on a single result if possible. The more characters
are added to a search query, the narrower the search becomes, and we should
show fewer results.
29 |
30 | Very important is also ranking of search results. This will bring more relevant
31 | search results to the top, since a lot of times it is hard to produce search
32 | query that will output only one result item.
33 |
Less important, but providing a better user experience, are suggestions for
writing better search queries. The suggestion feature should guide the user to
write better queries, which in turn will produce better results.
37 |
38 |
39 | ## Development
40 |
41 | To start developing open a terminal and run:
42 |
43 | ```
44 | env --chdir=frontend nix develop -c yarn dev
45 | ```
46 |
47 | You can point your browser to `http://localhost:3000` and start developing.
48 | Any changes to source files (`./frontend/src`) will trigger a hot reload of an
49 | application.
50 |
51 |
52 | ## Deploying
53 |
54 | - On each commit to `main` branch a GitHub Action is triggered.
55 | - GitHub Action then builds production version of the web application using
56 | `yarn prod` command.
57 | - The built web application (in `./dist`) is then deployed to Netlify.
58 | - GitHub Action can also be triggered via Pull Request, which if Pull Request
59 | was created from a non-forked repo's branch, will provide a preview url in a
60 | comment.
61 |
62 | ## Adding flakes
63 |
64 | To add your own flakes to the search index edit [./flakes/manual.toml](./flakes/manual.toml).
65 |
66 | Possible types are `github`, `gitlab`, `sourcehut`, and `git` (which is the fallback for any kind of git repository but requires to set a revision key manually as of now).
67 |
68 | To test whether your flake is compatible with nix flake-info you can try running `flake-info` against it
69 |
70 | ```
$ nix run github:nixos/nixos-search#flake-info -- flake <your-flake-ref>
72 | ```
73 |
--------------------------------------------------------------------------------
/flake-info/src/commands/nixpkgs_info.rs:
--------------------------------------------------------------------------------
1 | use anyhow::{Context, Result};
2 | use serde_json::Deserializer;
3 | use std::{collections::HashMap, fmt::Display};
4 |
5 | use command_run::{Command, LogTo};
6 | use log::error;
7 |
8 | use crate::data::import::{NixOption, NixpkgsEntry, Package};
9 |
10 | pub fn get_nixpkgs_info + Display>(nixpkgs_channel: T) -> Result> {
11 | let mut command = Command::new("nix-env");
12 | command.add_args(&[
13 | "--json",
14 | "-f",
15 | "",
16 | "-I",
17 | format!("nixpkgs={}", nixpkgs_channel.as_ref()).as_str(),
18 | "--arg",
19 | "config",
20 | "import ",
21 | "-qa",
22 | "--meta",
23 | ]);
24 |
25 | command.enable_capture();
26 | command.log_to = LogTo::Log;
27 | command.log_output_on_error = true;
28 |
29 | let parsed: Result> = command
30 | .run()
31 | .with_context(|| {
32 | format!(
33 | "Failed to gather information about nixpkgs {}",
34 | nixpkgs_channel.as_ref()
35 | )
36 | })
37 | .and_then(|o| {
38 | let output = &*o.stdout_string_lossy();
39 | let de = &mut Deserializer::from_str(output);
40 | let attr_set: HashMap =
41 | serde_path_to_error::deserialize(de).with_context(|| "Could not parse packages")?;
42 | Ok(attr_set
43 | .into_iter()
44 | .map(|(attribute, package)| NixpkgsEntry::Derivation { attribute, package })
45 | .collect())
46 | });
47 |
48 | parsed
49 | }
50 |
51 | pub fn get_nixpkgs_options + Display>(
52 | nixpkgs_channel: T,
53 | ) -> Result> {
54 | let mut command = Command::with_args("nix", &["eval", "--json"]);
55 | command.add_arg_pair("-f", super::EXTRACT_SCRIPT.clone());
56 | command.add_arg_pair("-I", format!("nixpkgs={}", nixpkgs_channel.as_ref()));
57 | command.add_arg("nixos-options");
58 |
59 | command.enable_capture();
60 | command.log_to = LogTo::Log;
61 | command.log_output_on_error = true;
62 |
63 | let parsed = command.run().with_context(|| {
64 | format!(
65 | "Failed to gather information about nixpkgs {}",
66 | nixpkgs_channel.as_ref()
67 | )
68 | });
69 |
70 | if let Err(ref e) = parsed {
71 | error!("Command error: {}", e);
72 | }
73 |
74 | parsed.and_then(|o| {
75 | let output = &*o.stdout_string_lossy();
76 | let de = &mut Deserializer::from_str(output);
77 | let attr_set: Vec =
78 | serde_path_to_error::deserialize(de).with_context(|| "Could not parse options")?;
79 | Ok(attr_set.into_iter().map(NixpkgsEntry::Option).collect())
80 | })
81 | }
82 |
--------------------------------------------------------------------------------
/frontend/elm-srcs.nix:
--------------------------------------------------------------------------------
1 | {
2 |
3 | "krisajenkins/remotedata" = {
4 | sha256 = "0m5bk0qhsjv14vajqrkph386696pnhj5rn51kgma8lwyvvx9ihw1";
5 | version = "6.0.1";
6 | };
7 |
8 | "elm/json" = {
9 | sha256 = "0kjwrz195z84kwywaxhhlnpl3p251qlbm5iz6byd6jky2crmyqyh";
10 | version = "1.1.3";
11 | };
12 |
13 | "truqu/elm-base64" = {
14 | sha256 = "12w68b4idbs2vn0gm0lj354pm745jb7n0fj69408mpvh5r1z4m1b";
15 | version = "2.0.4";
16 | };
17 |
18 | "elm/regex" = {
19 | sha256 = "0lijsp50w7n1n57mjg6clpn9phly8vvs07h0qh2rqcs0f1jqvsa2";
20 | version = "1.0.0";
21 | };
22 |
23 | "elm/html" = {
24 | sha256 = "1n3gpzmpqqdsldys4ipgyl1zacn0kbpc3g4v3hdpiyfjlgh8bf3k";
25 | version = "1.0.0";
26 | };
27 |
28 | "elm/browser" = {
29 | sha256 = "0nagb9ajacxbbg985r4k9h0jadqpp0gp84nm94kcgbr5sf8i9x13";
30 | version = "1.0.2";
31 | };
32 |
33 | "elm/core" = {
34 | sha256 = "0gyk7lx3b6vx2jlfbxdsb4xffn0wdvg5yxldq50jr2kk5dzc2prj";
35 | version = "1.0.4";
36 | };
37 |
38 | "elm/url" = {
39 | sha256 = "0av8x5syid40sgpl5vd7pry2rq0q4pga28b4yykn9gd9v12rs3l4";
40 | version = "1.0.0";
41 | };
42 |
43 | "elm/http" = {
44 | sha256 = "008bs76mnp48b4dw8qwjj4fyvzbxvlrl4xpa2qh1gg2kfwyw56v1";
45 | version = "2.0.0";
46 | };
47 |
48 | "NoRedInk/elm-json-decode-pipeline" = {
49 | sha256 = "0y25xn0yx1q2xlg1yx1i0hg4xq1yxx6yfa99g272z8162si75hnl";
50 | version = "1.0.0";
51 | };
52 |
53 | "hecrj/html-parser" = {
54 | sha256 = "1fbr3f8j8jjmjknyaq55jlnfazrljqf1128xrc5gzsw3x56fwrbf";
55 | version = "2.3.4";
56 | };
57 |
58 | "elm/bytes" = {
59 | sha256 = "02ywbf52akvxclpxwj9n04jydajcbsbcbsnjs53yjc5lwck3abwj";
60 | version = "1.0.8";
61 | };
62 |
63 | "elm/file" = {
64 | sha256 = "1rljcb41dl97myidyjih2yliyzddkr2m7n74x7gg46rcw4jl0ny8";
65 | version = "1.0.5";
66 | };
67 |
68 | "rtfeldman/elm-hex" = {
69 | sha256 = "1y0aa16asvwdqmgbskh5iba6psp43lkcjjw9mgzj3gsrg33lp00d";
70 | version = "1.0.0";
71 | };
72 |
73 | "elm/parser" = {
74 | sha256 = "0a3cxrvbm7mwg9ykynhp7vjid58zsw03r63qxipxp3z09qks7512";
75 | version = "1.1.0";
76 | };
77 |
78 | "elm/time" = {
79 | sha256 = "0vch7i86vn0x8b850w1p69vplll1bnbkp8s383z7pinyg94cm2z1";
80 | version = "1.0.0";
81 | };
82 |
83 | "elm/virtual-dom" = {
84 | sha256 = "0q1v5gi4g336bzz1lgwpn5b1639lrn63d8y6k6pimcyismp2i1yg";
85 | version = "1.0.2";
86 | };
87 |
88 | "elm-explorations/test" = {
89 | sha256 = "1fsd7bajm7qa93r5pn3mdafqh3blpzya601jbs9l238p0hmvh576";
90 | version = "1.2.2";
91 | };
92 |
93 | "elm/svg" = {
94 | sha256 = "1cwcj73p61q45wqwgqvrvz3aypjyy3fw732xyxdyj6s256hwkn0k";
95 | version = "1.0.1";
96 | };
97 |
98 | "elm/random" = {
99 | sha256 = "138n2455wdjwa657w6sjq18wx2r0k60ibpc4frhbqr50sncxrfdl";
100 | version = "1.0.0";
101 | };
102 | }
103 |
--------------------------------------------------------------------------------
/.github/workflows/import-to-elasticsearch.yml:
--------------------------------------------------------------------------------
# CI job: every hour, rebuild the search index by importing all nixpkgs
# channels and the configured flake groups into Elasticsearch, then warm
# the relevant indices with a few searches.
name: "Hourly import to Elasticsearch"

on:
  workflow_dispatch:
  schedule:
    - cron: '0 * * * *'

jobs:

  # Discover the channel list from the flake so the import matrix stays in
  # sync with what the repo considers supported.
  nixos-channels:
    runs-on: ubuntu-latest

    outputs:
      matrix: ${{ steps.nixos-channels.outputs.matrix }}

    steps:
      - name: Checking out the repository
        uses: actions/checkout@v3

      - name: Setup
        uses: ./.github/actions/common-setup
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CACHIX_SIGNING_KEY: ${{ secrets.CACHIX_SIGNING_KEY }}

      - name: NixOS Channels
        id: nixos-channels
        run: |
          nix build -L .#nixosChannels
          channels="{\"channel\": $(< ./result)}"
          echo $channels
          # NOTE(review): ::set-output is deprecated by GitHub Actions;
          # consider writing to $GITHUB_OUTPUT instead — confirm runner support.
          echo "::set-output name=matrix::$channels"

  import-nixpkgs:
    needs: nixos-channels
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix: ${{ fromJSON(needs.nixos-channels.outputs.matrix) }}

    env:
      RUST_LOG: debug
      # abort: skip channels whose index for this VERSION already exists
      FI_ES_EXISTS_STRATEGY: abort
      FI_ES_URL: ${{ secrets.ELASTICSEARCH_URL2 }}

    steps:
      - name: Checking out the repository
        uses: actions/checkout@v3

      - name: Setup
        uses: ./.github/actions/common-setup
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CACHIX_SIGNING_KEY: ${{ secrets.CACHIX_SIGNING_KEY }}

      - name: Import ${{ matrix.channel }} channel
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          nix run .#flake-info -- --push --elastic-schema-version=$(< VERSION) nixpkgs ${{ matrix.channel }}
        if: github.repository_owner == 'NixOS'

      - name: Warmup ${{ matrix.channel }} channel
        run: |
          for (( i = 0; i < 3; i++ )) do
            curl -sS ${{ secrets.ELASTICSEARCH_URL2 }}/latest-$(< VERSION)-nixos-${{ matrix.channel }}/_search | jq -c '.took // .'
          done
        if: github.repository_owner == 'NixOS'


  import-flakes:
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        group:
          - "manual"

    env:
      RUST_LOG: debug
      # recreate: flake groups are small, so rebuild their index each run
      FI_ES_EXISTS_STRATEGY: recreate
      FI_ES_URL: ${{ secrets.ELASTICSEARCH_URL2 }}

    steps:

      - name: Checking out the repository
        uses: actions/checkout@v3

      - name: Setup
        uses: ./.github/actions/common-setup
        with:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          CACHIX_SIGNING_KEY: ${{ secrets.CACHIX_SIGNING_KEY }}

      - name: Import ${{ matrix.group }} group
        run: |
          nix run .#flake-info -- --push --elastic-schema-version=$(< ./VERSION) group ./flakes/${{ matrix.group }}.toml ${{ matrix.group }}
        if: github.repository_owner == 'NixOS'

      - name: Warmup ${{ matrix.group }} group
        run: |
          for (( i = 0; i < 3; i++ )) do
            curl -sS ${{ secrets.ELASTICSEARCH_URL2 }}/latest-$(< VERSION)-group-${{ matrix.group }}/_search | jq -c '.took // .'
          done
        if: github.repository_owner == 'NixOS'
108 |
--------------------------------------------------------------------------------
/flake-info/src/data/flake.rs:
--------------------------------------------------------------------------------
1 | use std::path::PathBuf;
2 |
3 | use serde::{Deserialize, Serialize};
4 |
5 | use super::Source;
6 |
7 | /// Holds general infoamtion about a flake
8 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
9 | pub struct Flake {
10 | #[serde(rename(serialize = "flake_description"))]
11 | pub description: Option,
12 | #[serde(rename(serialize = "flake_path"), skip_serializing)]
13 | pub path: PathBuf,
14 | #[serde(rename(serialize = "flake_resolved"))]
15 | pub resolved: Repo,
16 |
17 | #[serde(rename(serialize = "flake_name"), skip_deserializing)]
18 | pub name: String,
19 |
20 | pub revision: Option,
21 |
22 | #[serde(
23 | skip_deserializing,
24 | rename(serialize = "flake_source"),
25 | skip_serializing_if = "Option::is_none"
26 | )]
27 | pub source: Option,
28 | }
29 |
impl Flake {
    /// Fills `self.name` with a human-readable name derived from the
    /// resolved repository. Plain git URLs have no obvious short name,
    /// so they get the default (empty) string.
    pub(crate) fn resolve_name(mut self) -> Self {
        self.name = match &self.resolved {
            Repo::Git { .. } => Default::default(),
            Repo::GitHub { repo, .. } => repo.clone(),
            Repo::Gitlab { repo, .. } => repo.clone(),
            Repo::SourceHut { repo, .. } => repo.clone(),
        };
        self
    }
}
41 |
/// Information about the flake origin
/// Supports (local/raw) Git, GitHub, SourceHut and Gitlab repos
// Serialized with an external "type" tag whose value is the lowercased
// variant name (e.g. {"type":"gitlab","owner":...,"repo":...}).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum Repo {
    // Raw/local git source; `url` is a path-like locator.
    Git { url: PathBuf },
    GitHub { owner: String, repo: String },
    Gitlab { owner: String, repo: String },
    SourceHut { owner: String, repo: String },
}
52 |
#[cfg(test)]
mod tests {
    use super::*;

    /// Deserializes real `nix flake info --json` output for a GitLab flake
    /// and checks both the parsed structure and name resolution.
    /// (The `::<Flake>` turbofish was restored after being stripped in
    /// transit — without it the `from_str` calls cannot infer a type.)
    #[test]
    fn gitlab_flake() {
        let nix_info_out = r#"{"description":"neuropil is a secure messaging library for IoT, robotics and more.","lastModified":1616059502,"locked":{"lastModified":1616059502,"narHash":"sha256-fHB1vyjDXQq/E2/Xb6Xs3caAAc0VkUlnzu5kl/PvFW4=","owner":"pi-lar","repo":"neuropil","rev":"9e2f634ffa45da3f5feb158a12ee32e1673bfe35","type":"gitlab"},"original":{"owner":"pi-lar","repo":"neuropil","type":"gitlab"},"originalUrl":"gitlab:pi-lar/neuropil","path":"/nix/store/z4fp2fc9hca40nnvxi0116pfbrla5zgl-source","resolved":{"owner":"pi-lar","repo":"neuropil","type":"gitlab"},"resolvedUrl":"gitlab:pi-lar/neuropil","revision":"9e2f634ffa45da3f5feb158a12ee32e1673bfe35","url":"gitlab:pi-lar/neuropil/9e2f634ffa45da3f5feb158a12ee32e1673bfe35"}"#;

        assert_eq!(
            serde_json::de::from_str::<Flake>(nix_info_out).unwrap(),
            Flake {
                description: Some(
                    "neuropil is a secure messaging library for IoT, robotics and more.".into()
                ),
                path: "/nix/store/z4fp2fc9hca40nnvxi0116pfbrla5zgl-source".into(),
                resolved: Repo::Gitlab {
                    owner: "pi-lar".into(),
                    repo: "neuropil".into()
                },
                name: "".into(),
                source: None,
                revision: Some("9e2f634ffa45da3f5feb158a12ee32e1673bfe35".into())
            }
        );

        assert_eq!(
            serde_json::de::from_str::<Flake>(nix_info_out)
                .unwrap()
                .resolve_name()
                .name,
            "neuropil"
        );
    }
}
87 |
--------------------------------------------------------------------------------
/flake-info/src/data/pandoc.rs:
--------------------------------------------------------------------------------
1 | use lazy_static::lazy_static;
2 | use std::path::{Path, PathBuf};
3 |
4 | use pandoc::*;
5 |
/// Directory containing nixpkgs' pandoc lua filters; injected at compile
/// time (the nix build sets NIXPKGS_PANDOC_FILTERS_PATH — see flake.nix).
const FILTERS_PATH: &str = env!("NIXPKGS_PANDOC_FILTERS_PATH");

lazy_static! {
    // Lua filter paths resolved once and reused for every conversion.
    static ref DOCBOOK_ROLES_FILTER: PathBuf =
        Path::new(FILTERS_PATH).join("docbook-reader/citerefentry-to-rst-role.lua");
    static ref MARKDOWN_ROLES_FILTER: PathBuf =
        Path::new(FILTERS_PATH).join("myst-reader/roles.lua");
    static ref MANPAGE_LINK_FILTER: PathBuf =
        Path::new(FILTERS_PATH).join("link-unix-man-references.lua");
    // Cross-reference fixer shipped with flake-info itself (assets/data).
    static ref XREF_FILTER: PathBuf = crate::DATADIR.join("data/fix-xrefs.lua");
}
17 |
18 | pub trait PandocExt {
19 | fn render_docbook(&self) -> Result;
20 | fn render_markdown(&self) -> Result;
21 | }
22 |
// NOTE(review): this impl was mangled by angle-bracket/HTML stripping and is
// preserved as found. In particular:
//   * the header was almost certainly `impl<T: AsRef<str>> PandocExt for T`
//     (every call site uses `self.as_ref()` as a &str);
//   * the return types were presumably `Result<String, PandocError>`;
//   * the format! literals below originally contained XML/HTML wrapper tags
//     (a DocBook wrapper element and an HTML wrapper around the output) that
//     were removed — restore them from the upstream repository; guessing the
//     exact tags here would silently change the rendered output.
impl> PandocExt for T {
    fn render_docbook(&self) -> Result {
        // Fast path: input without any markup is wrapped verbatim, skipping
        // the (expensive) pandoc subprocess. Wrapper tags stripped — see NOTE.
        if !self.as_ref().contains("<") {
            return Ok(format!(
                "{}
",
                self.as_ref()
            ));
        }

        // Wrap the fragment so pandoc sees a well-formed DocBook document.
        // Wrapper markup stripped here as well — see NOTE above.
        let wrapper_xml = format!(
            "

{}

",
            self.as_ref()
        );

        // Pipe the wrapped fragment through pandoc: DocBook -> HTML, with the
        // nixpkgs lua filters plus our own cross-reference fixer applied.
        let mut pandoc = pandoc::new();
        pandoc.set_input(InputKind::Pipe(wrapper_xml));
        pandoc.set_input_format(InputFormat::DocBook, Vec::new());
        pandoc.set_output(OutputKind::Pipe);
        pandoc.set_output_format(OutputFormat::Html, Vec::new());
        pandoc.add_options(&[
            PandocOption::LuaFilter(DOCBOOK_ROLES_FILTER.clone()),
            PandocOption::LuaFilter(MANPAGE_LINK_FILTER.clone()),
            PandocOption::LuaFilter(XREF_FILTER.clone()),
        ]);

        pandoc.execute().map(|result| match result {
            PandocOutput::ToBuffer(html) => {
                // Output wrapper tags stripped — see NOTE above.
                format!("{}", html)
            }
            // We requested OutputKind::Pipe, so any other variant is a
            // pandoc-crate contract violation.
            _ => unreachable!(),
        })
    }

    fn render_markdown(&self) -> Result {
        // CommonMark -> HTML with the extensions nixpkgs option docs rely on.
        let mut pandoc = pandoc::new();
        pandoc.set_input(InputKind::Pipe(self.as_ref().into()));
        pandoc.set_input_format(
            InputFormat::Commonmark,
            [
                MarkdownExtension::Attributes,
                MarkdownExtension::BracketedSpans,
                MarkdownExtension::DefinitionLists,
                MarkdownExtension::FencedDivs,
                MarkdownExtension::PipeTables,
                MarkdownExtension::RawAttribute,
                MarkdownExtension::Smart,
            ]
            .to_vec(),
        );
        pandoc.set_output(OutputKind::Pipe);
        pandoc.set_output_format(OutputFormat::Html, Vec::new());
        pandoc.add_options(&[
            PandocOption::LuaFilter(MARKDOWN_ROLES_FILTER.clone()),
            PandocOption::LuaFilter(MANPAGE_LINK_FILTER.clone()),
            PandocOption::LuaFilter(XREF_FILTER.clone()),
        ]);

        pandoc.execute().map(|result| match result {
            PandocOutput::ToBuffer(html) => {
                // Output wrapper tags stripped — see NOTE above.
                format!("{}", html)
            }
            _ => unreachable!(),
        })
    }
}
92 |
--------------------------------------------------------------------------------
/flake-info/src/data/source.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Result;
2 | use serde::{Deserialize, Serialize};
3 | use std::{
4 | ffi::OsStr,
5 | fs::File,
6 | io::{self, Read},
7 | path::Path,
8 | };
9 |
10 | pub type Hash = String;
11 | pub type FlakeRef = String;
12 |
13 | /// Information about the flake origin
14 | /// Supports (local/raw) Git, GitHub, SourceHut and Gitlab repos
15 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
16 | #[serde(tag = "type", rename_all = "lowercase")]
17 | pub enum Source {
18 | Github {
19 | owner: String,
20 | repo: String,
21 | description: Option,
22 | #[serde(rename(deserialize = "hash"))]
23 | git_ref: Option,
24 | },
25 | Gitlab {
26 | owner: String,
27 | repo: String,
28 | git_ref: Option,
29 | },
30 | SourceHut {
31 | owner: String,
32 | repo: String,
33 | git_ref: Option,
34 | },
35 | Git {
36 | url: String,
37 | },
38 | Nixpkgs(Nixpkgs),
39 | }
40 |
41 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
42 | struct TomlDocument {
43 | sources: Vec,
44 | }
45 |
46 | impl Source {
47 | pub fn to_flake_ref(&self) -> FlakeRef {
48 | match self {
49 | Source::Github {
50 | owner,
51 | repo,
52 | git_ref,
53 | ..
54 | } => format!(
55 | "github:{}/{}{}",
56 | owner,
57 | repo,
58 | git_ref
59 | .as_ref()
60 | .map_or("".to_string(), |f| format!("?ref={}", f))
61 | ),
62 | Source::Gitlab {
63 | owner,
64 | repo,
65 | git_ref,
66 | } => format!(
67 | "gitlab:{}/{}{}",
68 | owner,
69 | repo,
70 | git_ref
71 | .as_ref()
72 | .map_or("".to_string(), |f| format!("?ref={}", f))
73 | ),
74 | Source::SourceHut {
75 | owner,
76 | repo,
77 | git_ref,
78 | } => format!(
79 | "sourcehut:{}/{}{}",
80 | owner,
81 | repo,
82 | git_ref
83 | .as_ref()
84 | .map_or("".to_string(), |f| format!("?ref={}", f))
85 | ),
86 | Source::Git { url } => url.to_string(),
87 | Source::Nixpkgs(Nixpkgs { git_ref, .. }) => format!(
88 | "https://api.github.com/repos/NixOS/nixpkgs/tarball/{}",
89 | git_ref
90 | ),
91 | }
92 | }
93 |
94 | pub fn read_sources_file(path: &Path) -> io::Result> {
95 | let mut file = File::open(path)?;
96 |
97 | let mut buf = String::new();
98 | file.read_to_string(&mut buf)?;
99 |
100 | if path.extension() == Some(OsStr::new("toml")) {
101 | let document: TomlDocument = toml::from_str(&buf)?;
102 | Ok(document.sources)
103 | } else {
104 | Ok(serde_json::from_str(&buf)?)
105 | }
106 | }
107 |
108 | pub async fn nixpkgs(channel: String) -> Result {
109 | #[derive(Deserialize, Debug)]
110 | struct ApiResult {
111 | commit: Commit,
112 | }
113 |
114 | #[derive(Deserialize, Debug)]
115 | struct Commit {
116 | sha: String,
117 | }
118 |
119 | let request = reqwest::Client::builder()
120 | .user_agent("nixos-search")
121 | .build()?
122 | .get(format!(
123 | "https://api.github.com/repos/nixos/nixpkgs/branches/nixos-{}",
124 | channel
125 | ));
126 |
127 | let request = match std::env::var("GITHUB_TOKEN") {
128 | Ok(token) => request.bearer_auth(token),
129 | _ => request,
130 | };
131 |
132 | let response = request.send().await?;
133 |
134 | if !response.status().is_success() {
135 | Err(anyhow::anyhow!(
136 | "GitHub returned {:?} {}",
137 | response.status(),
138 | response.text().await?
139 | ))
140 | } else {
141 | let git_ref = response.json::().await?.commit.sha;
142 | let nixpkgs = Nixpkgs { channel, git_ref };
143 | Ok(nixpkgs)
144 | }
145 | }
146 | }
147 |
/// A nixpkgs channel pinned to a concrete commit.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Nixpkgs {
    // Channel suffix, e.g. "21.05" for the nixos-21.05 branch.
    pub channel: String,

    // Commit sha the channel was resolved to (see `Source::nixpkgs`).
    pub git_ref: String,
}
154 |
--------------------------------------------------------------------------------
/flake-info/src/data/prettyprint.rs:
--------------------------------------------------------------------------------
1 | use std::fmt::Display;
2 |
3 | use serde_json::Value;
4 |
/// Nesting depth for pretty printing; renders as two spaces per level.
struct Indent(usize);

impl Indent {
    /// One level deeper.
    fn next(&self) -> Indent {
        let Indent(depth) = self;
        Indent(depth + 1)
    }
}

impl Display for Indent {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(&"  ".repeat(self.0))
    }
}
17 |
/// Render a JSON value as Nix-style source text, starting at depth 0.
pub fn print_value(value: Value) -> String {
    print_value_indent(value, Indent(0))
}
21 |
22 | fn print_value_indent(value: Value, indent: Indent) -> String {
23 | match value {
24 | Value::Null => "null".to_owned(),
25 | Value::Bool(b) => format!("{}", b),
26 | Value::Number(n) => format!("{}", n),
27 | Value::String(s) => {
28 | let lines: Vec<&str> = s.lines().collect();
29 | if lines.len() > 1 {
30 | let lines = lines.join(&format!("\n{}", indent.next()));
31 | return format!(
32 | r#"''
33 | {next_indent}{lines}
34 | {indent}''"#,
35 | indent = indent,
36 | next_indent = indent.next(),
37 | lines = lines
38 | );
39 | }
40 |
41 | format!("{:?}", s)
42 | }
43 | Value::Array(a) => {
44 | if a.is_empty() {
45 | return "[ ]".to_owned();
46 | }
47 | let items = a
48 | .into_iter()
49 | .map(|v| print_value_indent(v, indent.next()))
50 | .collect::>()
51 | .join(&format!("\n{}", indent.next()));
52 |
53 | return format!(
54 | "[
55 | {next_indent}{items}
56 | {indent}]",
57 | indent = indent,
58 | next_indent = indent.next(),
59 | items = items
60 | );
61 | }
62 | Value::Object(o) => {
63 | if o.is_empty() {
64 | return "{ }".to_owned();
65 | }
66 | let items = o
67 | .into_iter()
68 | .map(|(k, v)| format!("{} = {}", k, print_value_indent(v, indent.next())))
69 | .collect::>()
70 | .join(&format!(";\n{}", indent.next()));
71 |
72 | return format!(
73 | "{{
74 | {next_indent}{items};
75 | {indent}}}",
76 | indent = indent,
77 | next_indent = indent.next(),
78 | items = items
79 | );
80 | }
81 | }
82 | }
83 |
#[cfg(test)]
mod tests {
    use serde_json::json;

    use super::*;

    // NOTE(review): the multi-line string literals below appear to have lost
    // their leading whitespace in transit (print_value indents nested lines
    // by two spaces per level, but the expected values here are flush-left).
    // Re-derive the expected strings from print_value's actual output before
    // trusting these assertions.

    #[test]
    fn test_string() {
        let json = json!("Hello World");
        assert_eq!(print_value(json), "\"Hello World\"");
    }

    #[test]
    fn test_multi_line_string() {
        let json = json!(
            r#" Hello
World
!!!"#
        );
        assert_eq!(
            print_value(json),
            r#"''
Hello
World
!!!
''"#
        );
    }

    #[test]
    fn test_num() {
        let json = json!(1);
        assert_eq!(print_value(json), "1");
    }

    #[test]
    fn test_bool() {
        let json = json!(true);
        assert_eq!(print_value(json), "true");
    }

    #[test]
    fn test_empty_list() {
        let json = json!([]);
        assert_eq!(print_value(json), "[ ]");
    }

    #[test]
    fn test_filled_list() {
        let json = json!([1, "hello", true, null]);
        assert_eq!(
            print_value(json),
            r#"[
1
"hello"
true
null
]"#
        );
    }

    #[test]
    fn test_empty_set() {
        let json = json!({});
        assert_eq!(print_value(json), "{ }");
    }

    #[test]
    fn test_filled_set() {
        let json = json!({"hello": "world"});
        assert_eq!(
            print_value(json),
            "{
hello = \"world\";
}"
        );
    }

    // Mixed nesting: array containing strings, objects and a nested array.
    #[test]
    fn test_nested() {
        let json = json!(
            [
                "HDMI-0",
                {
                    "output": "DVI-0",
                    "primary": true
                },
                {
                    "monitorConfig": "Option \"Rotate\" \"left\"",
                    "output": "DVI-1"
                },
                [ "hello", "word" ]
            ]);

        assert_eq!(
            print_value(json),
            r#"[
"HDMI-0"
{
output = "DVI-0";
primary = true;
}
{
monitorConfig = "Option \"Rotate\" \"left\"";
output = "DVI-1";
}
[
"hello"
"word"
]
]"#
        );
    }
}
198 |
--------------------------------------------------------------------------------
/flake.nix:
--------------------------------------------------------------------------------
{
  description = "Code behind search.nixos.org";

  # Binary cache for CI-built artifacts.
  nixConfig.extra-substituters = [ "https://nixos-search.cachix.org" ];
  nixConfig.extra-trusted-public-keys = [ "nixos-search.cachix.org-1:1HV3YF8az4fywnH+pAd+CXFEdpTXtv9WpoivPi+H70o=" ];

  inputs.nixpkgs.url = "nixpkgs/nixos-unstable";
  inputs.flake-utils.url = "github:numtide/flake-utils";
  # npmlock2nix and nixos-org-configurations are consumed as plain sources,
  # not as flakes.
  inputs.npmlock2nix.url = "github:nix-community/npmlock2nix";
  inputs.npmlock2nix.flake = false;
  inputs.nixos-org-configurations.url = "github:NixOS/nixos-org-configurations";
  inputs.nixos-org-configurations.flake = false;

  outputs = { self
            , nixpkgs
            , flake-utils
            , npmlock2nix
            , nixos-org-configurations
            }:
    flake-utils.lib.eachDefaultSystem
      (system:
        let
          pkgs = import nixpkgs {
            inherit system;
            overlays = [
              (self: super: {
                npmlock2nix = super.callPackage npmlock2nix {};
              })
            ];
          };
          lib = nixpkgs.lib;
          warnToUpgradeNix = lib.warn "Please upgrade Nix to 2.7 or later.";
          # Elasticsearch schema version shared by importer and frontend.
          version = lib.fileContents ./VERSION;
          # NixOS channels to expose in the UI, derived from the official
          # channel list published in nixos-org-configurations.
          nixosChannels =
            let
              allChannels = (import "${nixos-org-configurations}/channels.nix").channels;
              filteredChannels =
                lib.filterAttrs
                  (n: v:
                    builtins.elem v.status ["rolling" "beta" "stable" "deprecated"] &&
                    lib.hasPrefix "nixos-" n &&
                    v ? variant && v.variant == "primary"
                  )
                  allChannels;
            in
            {
              channels =
                lib.mapAttrsToList
                  (n: v:
                    {
                      id = lib.removePrefix "nixos-" n;
                      status = v.status;
                      jobset =
                        builtins.concatStringsSep
                          "/"
                          (lib.init (lib.splitString "/" v.job));
                      branch = n;
                    }
                  )
                  filteredChannels;
              # Default channel: the highest-versioned stable channel.
              default =
                builtins.head
                  (builtins.sort (e1: e2: ! (builtins.lessThan e1 e2))
                    (builtins.map
                      (lib.removePrefix "nixos-")
                      (builtins.attrNames
                        (lib.filterAttrs (_: v: v.status == "stable") filteredChannels)
                      )
                    )
                  );
            };
          nixosChannelsFile = pkgs.runCommand "nixosChannels.json" {} ''
            echo '${builtins.toJSON (builtins.map (c: c.id) nixosChannels.channels)}' > $out
          '';

          # Shared devshell builder; every shell exports the channel list and
          # schema version the tools expect at runtime.
          mkDevShell = { inputsFrom ? [], extraPackages ? [], extraShellHook ? "" }:
            pkgs.mkShell {
              inherit inputsFrom;
              packages = extraPackages;
              shellHook = ''
                export NIXOS_CHANNELS='${builtins.toJSON nixosChannels}';
                export ELASTICSEARCH_MAPPING_SCHEMA_VERSION="${version}";
              '' + extraShellHook;
            };
        in rec {

          packages.default = packages.flake-info;
          packages.flake-info = import ./flake-info { inherit pkgs nixosChannels; };
          packages.frontend = import ./frontend { inherit pkgs nixosChannels version; };
          packages.nixosChannels = nixosChannelsFile;

          devShells.default = mkDevShell {
            inputsFrom = [
              packages.flake-info
              packages.frontend
            ];
            extraPackages = [
              pkgs.rustfmt
            ];
            extraShellHook = ''
              export RUST_SRC_PATH="${pkgs.rustPlatform.rustLibSrc}";
              export NIXPKGS_PANDOC_FILTERS_PATH="${packages.flake-info.NIXPKGS_PANDOC_FILTERS_PATH}";
              export PATH=$PWD/frontend/node_modules/.bin:$PATH
            '';
          };

          devShells.flake-info = mkDevShell {
            inputsFrom = [packages.flake-info];
            extraPackages = [pkgs.rustfmt];
            extraShellHook = ''
              export RUST_SRC_PATH="${pkgs.rustPlatform.rustLibSrc}";
              export NIXPKGS_PANDOC_FILTERS_PATH="${packages.flake-info.NIXPKGS_PANDOC_FILTERS_PATH}";
            '';
          };

          devShells.frontend = mkDevShell {
            inputsFrom = [packages.frontend] ;
            extraShellHook = ''
              export PATH=$PWD/frontend/node_modules/.bin:$PATH
              rm -rf frontend/node_modules
              ln -sf ${packages.frontend.yarnPkg}/libexec/${(builtins.parseDrvName packages.frontend.name).name}/node_modules frontend/
              echo "========================================================"
              echo "= To develop the frontend run: cd frontend && yarn dev ="
              echo "========================================================"
            '';
          };

          # XXX: for backwards compatibility
          devShell = warnToUpgradeNix devShells.default;
          defaultPackage = warnToUpgradeNix packages.default;
        }
      );
}
134 |
--------------------------------------------------------------------------------
/flake-info/assets/commands/flake_info.nix:
--------------------------------------------------------------------------------
# Extracts packages, apps and NixOS-module options from a flake (or from
# nixpkgs itself via the `nixos-options` attribute). Evaluated by flake-info
# with "input-flake" substituted for the flake under inspection.
{ flake ? null }:
let
  resolved = builtins.getFlake "input-flake";

  # NOTE(review): the angle-bracket search path was lost in transit; this
  # almost certainly read `import <nixpkgs> {}` — confirm against upstream.
  nixpkgs = (import <nixpkgs> {});
  lib = nixpkgs.lib;

  # filter = lib.filterAttrs (key: _ : key == "apps" || key == "packages");

  withSystem = fn: lib.mapAttrs (system: drvs: (fn system drvs));
  # A derivation is usable if it evaluates, is not marked broken, has a name
  # and declares outputs; tryEval guards against throwing packages.
  isValid = d:
    let
      r = builtins.tryEval (lib.isDerivation d && ! (lib.attrByPath [ "meta" "broken" ] false d) &&
        builtins.seq d.name true && d ? outputs);
    in
    r.success && r.value;
  validPkgs = lib.filterAttrs (k: v: isValid v);

  readPackages = system: drvs: lib.mapAttrsToList (
    attribute_name: drv: (
      {
        entry_type = "package";
        attribute_name = attribute_name;
        system = system;
        name = drv.name;
        # TODO consider using `builtins.parseDrvName`
        version = drv.version or "";
        outputs = drv.outputs;
        # paths = builtins.listToAttrs ( map (output: {name = output; value = drv.${output};}) drv.outputs );
        default_output = drv.outputName;
      }
      // lib.optionalAttrs (drv ? meta && drv.meta ? description) { inherit (drv.meta) description; }
      // lib.optionalAttrs (drv ? meta && drv.meta ? license) { inherit (drv.meta) license; }
    )
  ) (validPkgs drvs);
  readApps = system: apps: lib.mapAttrsToList (
    attribute_name: app: (
      {
        entry_type = "app";
        attribute_name = attribute_name;
        system = system;
      }
      // lib.optionalAttrs (app ? outPath) { bin = app.outPath; }
      // lib.optionalAttrs (app ? program) { bin = app.program; }
      // lib.optionalAttrs (app ? type) { type = app.type; }
    )
  ) apps;

  readNixOSOptions = let
    declarations = module: (
      lib.evalModules {
        modules = (if lib.isList module then module else [ module ]) ++ [
          (
            { ... }: {
              _module.check = false;
            }
          )
        ];
        specialArgs = {
          # !!! NixOS-specific. Unfortunately, NixOS modules can rely on the `modulesPath`
          # argument to import modules from the nixos tree. However, most of the time
          # this is done to import *profiles* which do not declare any options, so we
          # can allow it.
          modulesPath = "${nixpkgs.path}/nixos/modules";
        };
      }
    ).options;

    cleanUpOption = extraAttrs: opt:
      let
        applyOnAttr = n: f: lib.optionalAttrs (builtins.hasAttr n opt) { ${n} = f opt.${n}; };
        # Strip the /nix/store/<hash>-source prefix from declaration paths.
        mkDeclaration = decl:
          let
            discard = lib.concatStringsSep "/" (lib.take 4 (lib.splitString "/" decl)) + "/";
            path = if lib.hasPrefix builtins.storeDir decl then lib.removePrefix discard decl else decl;
          in
          path;

        # Replace functions by the string
        substFunction = x:
          if builtins.isAttrs x then
            lib.mapAttrs (_:substFunction ) x
          else if builtins.isList x then
            map substFunction x
          else if lib.isFunction x then
            "function"
          else
            x;
      in
      opt
      // { entry_type = "option"; }
      // applyOnAttr "default" substFunction
      // applyOnAttr "example" substFunction # (_: { __type = "function"; })
      // applyOnAttr "type" substFunction
      // applyOnAttr "declarations" (map mkDeclaration)
      // extraAttrs;
  in
  { module, modulePath ? null }: let
    opts = lib.optionAttrSetToDocList (declarations module);
    extraAttrs = lib.optionalAttrs (modulePath != null) {
      flake = modulePath;
    };
  in
  map (cleanUpOption extraAttrs) (lib.filter (x: x.visible && !x.internal && lib.head x.loc != "_module") opts);

  readFlakeOptions = let
    nixosModulesOpts = builtins.concatLists (lib.mapAttrsToList (moduleName: module:
      readNixOSOptions {
        inherit module;
        modulePath = [ flake moduleName ];
      }
    ) (resolved.nixosModules or {}));

    nixosModuleOpts = lib.optionals (resolved ? nixosModule) (
      readNixOSOptions {
        module = resolved.nixosModule;
        modulePath = [ flake ];
      }
    );
  in
  # We assume that `nixosModules` includes `nixosModule` when there
  # are multiple modules
  if nixosModulesOpts != [] then nixosModulesOpts else nixosModuleOpts;

  read = reader: set: lib.flatten (lib.attrValues (withSystem reader set));

  legacyPackages' = read readPackages (resolved.legacyPackages or {});
  packages' = read readPackages (resolved.packages or {});

  apps' = read readApps (resolved.apps or {});


  # Merge the per-system entries of one attribute into a single entry
  # carrying a `platforms` list instead of a `system` field.
  collectSystems = lib.lists.foldr (
    drv@{ attribute_name, system, ... }: set:
    let
      present = set."${attribute_name}" or ({ platforms = []; } // drv);

      drv' = present // {
        platforms = present.platforms ++ [ system ];
      };
      drv'' = removeAttrs drv' [ "system" ];
    in
    set // {
      ${attribute_name} = drv'';
    }
  ) {};

in

rec {
  legacyPackages = lib.attrValues (collectSystems legacyPackages');
  packages = lib.attrValues (collectSystems packages');
  apps = lib.attrValues (collectSystems apps');
  options = readFlakeOptions;
  all = packages ++ apps ++ options;

  # nixpkgs-specific, doesn't use the flake argument
  # NOTE(review): the angle-bracket path was lost in transit; presumably
  # `import <nixpkgs/nixos/release.nix> {}` — confirm against upstream.
  nixos-options = lib.mapAttrsToList (name: option: option // { inherit name; })
    (builtins.fromJSON (builtins.unsafeDiscardStringContext (builtins.readFile
      "${(import <nixpkgs/nixos/release.nix> {}).options}/share/doc/nixos/options.json")));
}
162 |
--------------------------------------------------------------------------------
/frontend/src/Route.elm:
--------------------------------------------------------------------------------
module Route exposing
    ( Route(..)
    , SearchArgs
    , SearchRoute
    , SearchType(..)
    , allTypes
    , fromUrl
    , href
    , replaceUrl
    , routeToString
    , searchTypeToString
    , searchTypeToTitle
    )

import Browser.Navigation
import Html
import Html.Attributes
import Route.SearchQuery exposing (SearchQuery)
import Url
import Url.Builder exposing (QueryParameter)

-- NOTE(review): the exposed operators were garbled to `(>), (>)` by
-- angle-bracket stripping; `</>` (path) and `<?>` (query) are the only
-- Url.Parser operators this module uses.
import Url.Parser exposing ((</>), (<?>))
import Url.Parser.Query
23 |
24 |
25 |
26 | -- ROUTING
27 |
28 |
{-| All parameters a search page accepts via the URL query string. -}
type alias SearchArgs =
    { query : Maybe SearchQuery
    , channel : Maybe String
    , show : Maybe String
    , from : Maybe Int
    , size : Maybe Int
    , buckets : Maybe String
    , sort : Maybe String
    , type_ : Maybe SearchType
    }


{-| Which index a search targets. -}
type SearchType
    = OptionSearch
    | PackageSearch



-- | FlakeSearch


allTypes : List SearchType
allTypes =
    [ PackageSearch, OptionSearch ]


{-| Parse the `type` query parameter; unknown values yield Nothing. -}
searchTypeFromString : String -> Maybe SearchType
searchTypeFromString string =
    case string of
        "options" ->
            Just OptionSearch

        "packages" ->
            Just PackageSearch

        -- "flakes" ->
        --     Just FlakeSearch
        _ ->
            Nothing


{-| Inverse of searchTypeFromString; used when serializing URLs. -}
searchTypeToString : SearchType -> String
searchTypeToString stype =
    case stype of
        OptionSearch ->
            "options"

        PackageSearch ->
            "packages"



-- FlakeSearch ->
--     "flakes"


{-| Human-readable title for a search type (used in page headings). -}
searchTypeToTitle : SearchType -> String
searchTypeToTitle stype =
    case stype of
        OptionSearch ->
            "Options"

        PackageSearch ->
            "Packages"



-- FlakeSearch ->
--     "flakes"


type alias SearchRoute =
    SearchArgs -> Route
103 |
{-| Parse the search query string into SearchArgs. The raw `query` parameter
is extracted separately (via Route.SearchQuery) because it needs custom
decoding; the remaining parameters use the standard query parsers.

NOTE(review): the `<?>` operators were garbled to `>` by angle-bracket
stripping and are restored here; chaining `<?>` accumulates one query
parameter per step into the curried `SearchArgs` constructor.
-}
searchQueryParser : Url.Url -> Url.Parser.Parser (SearchArgs -> msg) msg
searchQueryParser url =
    let
        rawQuery =
            Route.SearchQuery.toRawQuery url

        maybeQuery =
            Maybe.andThen (Route.SearchQuery.searchString "query") rawQuery
    in
    Url.Parser.map (SearchArgs maybeQuery) <|
        Url.Parser.top
            <?> Url.Parser.Query.string "channel"
            <?> Url.Parser.Query.string "show"
            <?> Url.Parser.Query.int "from"
            <?> Url.Parser.Query.int "size"
            <?> Url.Parser.Query.string "buckets"
            <?> Url.Parser.Query.string "sort"
            <?> Url.Parser.Query.map (Maybe.andThen searchTypeFromString) (Url.Parser.Query.string "type")
122 |
123 |
{-| Serialize SearchArgs back into URL query parameters; the raw search
query is returned separately so it can be encoded specially.
-}
searchArgsToUrl : SearchArgs -> ( List QueryParameter, Maybe ( String, Route.SearchQuery.SearchQuery ) )
searchArgsToUrl args =
    ( List.filterMap identity
        [ Maybe.map (Url.Builder.string "channel") args.channel
        , Maybe.map (Url.Builder.string "show") args.show
        , Maybe.map (Url.Builder.int "from") args.from
        , Maybe.map (Url.Builder.int "size") args.size
        , Maybe.map (Url.Builder.string "buckets") args.buckets
        , Maybe.map (Url.Builder.string "sort") args.sort
        , Maybe.map (Url.Builder.string "type") <| Maybe.map searchTypeToString args.type_
        ]
    , Maybe.map (Tuple.pair "query") args.query
    )


{-| All pages of the application. -}
type Route
    = NotFound
    | Home
    | Packages SearchArgs
    | Options SearchArgs
    | Flakes SearchArgs
145 |
146 |
{-| Top-level route parser.

NOTE(review): the `</>` operators were garbled to `>` by angle-bracket
stripping and are restored here; each search page is a path segment
followed by the shared query parser.
-}
parser : Url.Url -> Url.Parser.Parser (Route -> msg) msg
parser url =
    Url.Parser.oneOf
        [ Url.Parser.map Home Url.Parser.top
        , Url.Parser.map NotFound <| Url.Parser.s "not-found"
        , Url.Parser.map Packages <| Url.Parser.s "packages" </> searchQueryParser url
        , Url.Parser.map Options <| Url.Parser.s "options" </> searchQueryParser url
        , Url.Parser.map Flakes <| Url.Parser.s "flakes" </> searchQueryParser url
        ]
156 |
157 |
158 |
159 | -- PUBLIC HELPERS
160 |
161 |
{-| Build an href attribute pointing at a route. -}
href : Route -> Html.Attribute msg
href targetRoute =
    Html.Attributes.href (routeToString targetRoute)


{-| Navigate to a route without adding a browser history entry. -}
replaceUrl : Browser.Navigation.Key -> Route -> Cmd msg
replaceUrl navKey route =
    Browser.Navigation.replaceUrl navKey (routeToString route)


{-| Parse a URL into a Route; Nothing when no route matches. -}
fromUrl : Url.Url -> Maybe Route
fromUrl url =
    -- The RealWorld spec treats the fragment like a path.
    -- This makes it *literally* the path, so we can proceed
    -- with parsing as if it had been a normal path all along.
    --{ url | path = Maybe.withDefault "" url.fragment, fragment = Nothing }
    Url.Parser.parse (parser url) url
179 |
180 |
181 |
182 | -- INTERNAL
183 |
184 |
{-| Render a route as a URL string, including query parameters and the
specially-encoded raw search query.
-}
routeToString : Route -> String
routeToString =
    let
        buildString ( path, query, searchQuery ) =
            Route.SearchQuery.absolute path query <|
                Maybe.withDefault [] <|
                    Maybe.map List.singleton searchQuery
    in
    buildString << routeToPieces


{-| Decompose a route into path segments, query parameters and raw query. -}
routeToPieces : Route -> ( List String, List QueryParameter, Maybe ( String, Route.SearchQuery.SearchQuery ) )
routeToPieces page =
    case page of
        Home ->
            ( [], [], Nothing )

        NotFound ->
            ( [ "not-found" ], [], Nothing )

        Packages searchArgs ->
            searchArgsToUrl searchArgs
                |> (\( query, raw ) -> ( [ "packages" ], query, raw ))

        Options searchArgs ->
            searchArgsToUrl searchArgs
                |> (\( query, raw ) -> ( [ "options" ], query, raw ))

        Flakes searchArgs ->
            searchArgsToUrl searchArgs
                |> (\( query, raw ) -> ( [ "flakes" ], query, raw ))
216 |
--------------------------------------------------------------------------------
/flake-info/README.md:
--------------------------------------------------------------------------------
1 | # Flake Info
2 |
3 | A tool that fetches packages and apps from nix flakes.
4 |
5 | ## Usage
6 |
7 | ```
8 | flake-info 0.3.0
9 | Extracts various information from a given flake
10 |
11 | USAGE:
12 | flake-info [FLAGS] [OPTIONS] [extra]...
13 |
14 | FLAGS:
15 | --push Push to Elasticsearch (Configure using FI_ES_* environment variables)
16 | -h, --help Prints help information
17 | --json Print ElasticSeach Compatible JSON output
18 | -V, --version Prints version information
19 |
20 | OPTIONS:
21 | --elastic-exists
22 | How to react to existing indices [env: FI_ES_EXISTS_STRATEGY=] [default: abort] [possible values: Abort,
23 | Ignore, Recreate]
24 | --elastic-index-name Name of the index to store results to [env: FI_ES_INDEX=]
25 | -p, --elastic-pw
26 | Elasticsearch password (unimplemented) [env: FI_ES_PASSWORD=]
27 |
28 | --elastic-schema-version
29 | Which schema version to associate with the operation [env: FI_ES_VERSION=]
30 |
31 | --elastic-url
32 | Elasticsearch instance url [env: FI_ES_URL=] [default: http://localhost:9200]
33 |
34 | -u, --elastic-user Elasticsearch username (unimplemented) [env: FI_ES_USER=]
35 | -k, --kind
36 | Kind of data to extract (packages|options|apps|all) [default: all]
37 |
38 |
39 | ARGS:
40 | ... Extra arguments that are passed to nix as it
41 |
42 | SUBCOMMANDS:
43 | flake
44 | group
45 | help Prints this message or the help of the given subcommand(s)
46 | nixpkgs
47 | ```
48 |
49 | ### flake
50 |
51 | Flakes can be imported using the flake subcommand
52 |
53 | ```
54 | USAGE:
54 |     flake-info flake [FLAGS] <flake-ref>
56 |
57 | FLAGS:
58 | --gc Whether to gc the store after info or not
59 | -h, --help Prints help information
60 | --temp-store Whether to use a temporary store or not. Located at /tmp/flake-info-store
61 | -V, --version Prints version information
62 |
63 | ARGS:
63 |     <flake-ref>    Flake identifier passed to nix to gather information about
65 | ```
66 |
67 | The `<flake-ref>` argument should contain a valid reference to a flake. It accepts all formats nix accepts:
68 |
69 | > use git+<url> to checkout a git repository at <url>
70 | > use /local/absolute/path or ./relative/path to load a local source
71 | > use gitlab:<owner>/<repo>, github:<owner>/<repo> or sourcehut:<owner>/<repo> to
72 | > shortcut gitlab, github or sourcehut repositories
73 |
74 |
75 | Optionally, analyzing can be done in a temporary store enabled by the `--temp-store` option.
76 |
77 | #### Example
78 |
79 | ```
80 | $ flake-info flake github:ngi-nix/offen
81 | ```
82 |
83 | ### nixpkgs
84 |
85 | nixpkgs currently have to be imported in a different way. This is what the `nixpkgs` subcommand exists for.
86 |
87 | It takes any valid git reference to the upstream [`nixos/nixpkgs`](https://github.com/nixos/nixpkgs/) repo as an argument and produces a complete output.
88 |
89 | **This operation may take a short while and produces lots of output**
90 |
91 | #### Example
92 |
93 | ```
94 | $ flake-info nixpkgs nixos-21.05
95 | ```
96 |
97 | ### group
98 |
99 | To perform a bulk import grouping multiple inputs under the same name/index, use the group command.
100 |
101 | It expects a JSON file as input that contains references to flakes or nixpkgs. If those resources are on GitHub, GitLab or SourceHut they can be extended with more meta information including pinning the commit hash/ref.
102 |
103 | The second argument is the group name that is used to provide the index name.
104 |
105 | #### Example
106 |
107 | An example `targets.json` file can look like the following
108 |
109 | ```json
110 | [
111 | {
112 | "type": "git",
113 | "url": "./."
114 | },
115 | {
116 | "type": "git",
117 | "url": "github:fluffynukeit/adaspark"
118 | },
119 | {
120 | "type": "github",
121 | "owner": "ngi-nix",
122 | "repo": "offen",
123 | "hash": "4052febf151d60aa4352fa1960cf3ae088f600aa",
124 | "description": "Hier könnte Ihre Werbung stehen"
125 | }
126 | ]
127 | ```
128 |
129 | ```
130 | $ flake-info group ./targets.json small-group
131 | ```
132 |
133 | ### Elasticsearch
134 |
135 | A number of flags is dedicated to pushing to elasticsearch.
136 |
137 | ```
138 | --elastic-exists
139 | How to react to existing indices [env: FI_ES_EXISTS_STRATEGY=] [default: abort]
140 | [possible values: Abort, Ignore, Recreate]
141 | --elastic-index-name
142 | Name of the index to store results to [env: FI_ES_INDEX=]
143 | -p, --elastic-pw
144 | Elasticsearch password (unimplemented) [env: FI_ES_PASSWORD=]
145 |
146 | --elastic-schema-version
147 | Which schema version to associate with the operation [env: FI_ES_VERSION=]
148 |
149 | --elastic-url
150 | Elasticsearch instance url [env: FI_ES_URL=] [default: http://localhost:9200]
151 |
152 | -u, --elastic-user Elasticsearch username (unimplemented) [env: FI_ES_USER=]
153 | ```
154 |
155 |
156 | #### Example
157 |
158 | ```
159 | $ flake-info --push \
160 | --elastic-url http://localhost:5555 \
161 | --elastic-index-name latest-21-21.05
162 | --elastic-schema-version 21 group ./examples/ngi-nix.json ngi-nix
163 | ```
164 |
165 |
166 | ## Installation
167 |
168 | ### Preparations
169 |
170 | This tool requires your system to have Nix installed!
171 |
172 | You can install nix using this installer: https://nixos.org/guides/install-nix.html
173 | Also, see https://nixos.wiki/wiki/Nix_Installation_Guide if your system is ✨special✨.
174 |
175 | ### Preparations (Docker)
176 |
177 | If you do not want to install nix on your system, using Docker is an alternative.
178 |
179 | Enter the [nixos/nix](https://hub.docker.com/u/nixos/) docker image and proceed
180 |
181 | ### Setup nix flakes
182 |
183 | Note that you also need to have nix flakes support.
184 |
185 | Once you have nix installed run the following commands:
186 |
187 | 1. ```
188 | $ nix-shell -I nixpkgs=channel:nixos-21.05 -p nixFlakes
189 | ```
190 | to enter a shell with the preview version of nix flakes installed.
191 | 2. ```
192 | $ mkdir -p ~/.config/nix
$ echo "experimental-features = nix-command flakes" > ~/.config/nix/nix.conf
194 | ```
195 | to enable flake support
196 |
197 | ### Installation, finally
198 |
199 | This project is defined as a flake therefore you can build the tool using
200 |
201 | ```
202 | $ nix build
203 | or
204 | $ nix build github:miszkur/github-search
205 | ```
206 |
Replace `build` with `run` if you want to run the tool directly.
208 |
--------------------------------------------------------------------------------
/flake-info/src/data/utility.rs:
--------------------------------------------------------------------------------
1 | use fancy_regex::Regex;
2 | use lazy_static::lazy_static;
3 | use serde::{Deserialize, Deserializer, Serialize, Serializer};
4 |
/// Wrapper marking a value whose contents are stored *reversed* when
/// serialized (see the `Serialize`/`Deserialize` impls below).
///
/// NOTE: the extraction of this file had stripped the `<T>` type
/// parameter; restored here so the tuple struct is generic again.
#[derive(Debug, Clone, PartialEq)]
pub struct Reverse<T>(pub T);
7 |
/// Types whose value can be "reversed"; used by [Reverse] so that a
/// reversed copy of a field can be stored alongside the original.
pub trait Reversable {
    fn reverse(&self) -> Self;
}

impl Reversable for String {
    /// Reverses the string character-by-character (by `char`, not by byte,
    /// so multi-byte UTF-8 sequences remain valid).
    fn reverse(&self) -> Self {
        self.chars().rev().collect::<String>()
    }
}

impl<T: Reversable + Clone> Reversable for Vec<T> {
    /// Reverses each *element* of the vector; the order of the elements
    /// themselves is preserved.
    fn reverse(&self) -> Self {
        self.iter().cloned().map(|item| item.reverse()).collect()
    }
}
23 |
24 | impl Reversable for Reverse
25 | where
26 | T: Reversable + Serialize,
27 | {
28 | fn reverse(&self) -> Self {
29 | Reverse(self.0.reverse())
30 | }
31 | }
32 |
33 | impl Serialize for Reverse
34 | where
35 | T: Reversable + Serialize,
36 | {
37 | fn serialize(&self, serializer: S) -> Result
38 | where
39 | S: serde::Serializer,
40 | {
41 | self.0.reverse().serialize(serializer)
42 | }
43 | }
44 |
45 | impl<'de, T> Deserialize<'de> for Reverse
46 | where
47 | T: Reversable + Serialize + Deserialize<'de>,
48 | {
49 | fn deserialize(deserializer: D) -> Result, D::Error>
50 | where
51 | D: Deserializer<'de>,
52 | {
53 | Ok(Reverse(T::deserialize(deserializer)?.reverse()))
54 | }
55 | }
56 |
57 | /// A utility type that can represent the presence of either a single associated
58 | /// value or a list of those. Adding absence can be achieved by wrapping the type
59 | /// in an [Option]
60 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
61 | #[serde(untagged)]
62 | pub enum OneOrMany {
63 | #[serde(serialize_with = "list")]
64 | One(T),
65 | Many(Vec),
66 | }
67 |
68 | impl OneOrMany {
69 | pub fn into_list(self) -> Vec {
70 | match self {
71 | OneOrMany::One(one) => vec![one],
72 | OneOrMany::Many(many) => many,
73 | }
74 | }
75 | }
76 |
77 | /// A utility type that flattens lists of lists as seen with `maintainers` and `platforms` on selected packages
78 | /// in an [Option]
79 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
80 | #[serde(untagged)]
81 | pub enum Flatten {
82 | #[serde(serialize_with = "list")]
83 | Single(T),
84 | Deep(Vec>),
85 | }
86 |
87 | impl Flatten {
88 | pub fn flatten(self) -> Vec {
89 | match self {
90 | Flatten::Single(s) => vec![s],
91 | Flatten::Deep(v) => v.into_iter().map(Flatten::flatten).flatten().collect(),
92 | }
93 | }
94 | }
95 |
96 | // TODO: use this or a to_ist function?
97 | /// Serialization helper that serializes single elements as a list with a single
98 | /// item
99 | pub fn list(item: &T, s: S) -> Result
100 | where
101 | T: Serialize,
102 | S: Serializer,
103 | {
104 | s.collect_seq(vec![item].iter())
105 | }
106 |
107 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
108 | pub struct AttributeQuery(Vec);
109 |
lazy_static! {
    // Matches one "word" of an attribute name: a shortest run ending at a
    // camelCase / letter-digit boundary, at a separator ([._-]), or at end
    // of input. The look-behind/look-ahead groups require `fancy_regex`.
    static ref QUERY: Regex =
        Regex::new(".+?(?:(?<=[a-z])(?=[1-9A-Z])|(?<=[1-9A-Z])(?=[A-Z][a-z])|[._-]|$)").unwrap();
}
114 |
115 | impl AttributeQuery {
116 | pub fn new(attribute_name: &str) -> Self {
117 | const SUFFIX: &[char] = &['-', '.', '_'];
118 |
119 | let matches = QUERY
120 | .find_iter(attribute_name)
121 | .map(|found| found.unwrap().as_str())
122 | .collect::>();
123 |
124 | let tokens = (0..matches.len())
125 | .flat_map(|index| {
126 | let (_, tokens) = matches.iter().skip(index).fold(
127 | (String::new(), Vec::new()),
128 | |(prev_parts, mut tokens), part| {
129 | let token: String = prev_parts + part;
130 | tokens.push(token.trim_end_matches(SUFFIX).to_owned());
131 | (token, tokens)
132 | },
133 | );
134 |
135 | tokens
136 | })
137 | .collect::>();
138 |
139 | AttributeQuery(tokens)
140 | }
141 |
142 | pub fn query(&self) -> &[String] {
143 | &self.0
144 | }
145 | }
146 |
impl Reversable for AttributeQuery {
    fn reverse(&self) -> Self {
        // NOTE: `.to_owned()` yields a Vec<String>, so method resolution
        // picks `<Vec<String> as Reversable>::reverse` (reversing each
        // token's characters) — not the in-place `&mut` slice `reverse`,
        // which would only be reached after a deref step.
        AttributeQuery(self.query().to_owned().reverse())
    }
}
152 |
#[cfg(test)]
mod tests {
    use super::*;

    // Verifies that AttributeQuery::new emits every contiguous sub-sequence
    // of the attribute path's "words" (camelCase, digit, and separator
    // boundaries), with trailing separators trimmed. Both sides are sorted
    // so the comparison is order-independent.
    #[test]
    fn attr_query_test() {
        assert_eq!(
            {
                let mut q = AttributeQuery::new("services.nginx.extraConfig")
                    .query()
                    .to_owned();
                q.sort();
                q
            },
            {
                let mut ex = [
                    "services.nginx.extraConfig",
                    "services.nginx.extra",
                    "services.nginx",
                    "services",
                    "nginx.extraConfig",
                    "nginx.extra",
                    "nginx",
                    "extraConfig",
                    "extra",
                    "Config",
                ];
                ex.sort_unstable();
                ex
            },
        );

        // digit runs form their own words: "python37" splits into
        // "python" and "37", "test1" into "test" and "1"
        assert_eq!(
            {
                let mut q = AttributeQuery::new("python37Packages.test1_name-test2")
                    .query()
                    .to_owned();
                q.sort();
                q
            },
            {
                let mut ex = [
                    "python37Packages.test1_name-test2",
                    "python37Packages.test1_name-test",
                    "python37Packages.test1_name",
                    "python37Packages.test1",
                    "python37Packages.test",
                    "python37Packages",
                    "python37",
                    "python",
                    "37Packages.test1_name-test2",
                    "37Packages.test1_name-test",
                    "37Packages.test1_name",
                    "37Packages.test1",
                    "37Packages.test",
                    "37Packages",
                    "37",
                    "Packages.test1_name-test2",
                    "Packages.test1_name-test",
                    "Packages.test1_name",
                    "Packages.test1",
                    "Packages.test",
                    "Packages",
                    "test1_name-test2",
                    "test1_name-test",
                    "test1_name",
                    "test1",
                    "test",
                    "1_name-test2",
                    "1_name-test",
                    "1_name",
                    "1",
                    "name-test2",
                    "name-test",
                    "name",
                    "test2",
                    "test",
                    "2",
                ];
                ex.sort_unstable();
                ex
            }
        );
    }
}
238 |
--------------------------------------------------------------------------------
/frontend/src/Page/Flakes.elm:
--------------------------------------------------------------------------------
1 | module Page.Flakes exposing
2 | ( Model(..)
3 | , Msg(..)
4 | , init
5 | , makeRequest
6 | , update
7 | , view
8 | )
9 |
10 | import Browser.Navigation
11 | import Html
12 | exposing
13 | ( Html
14 | , a
15 | , div
16 | , h1
17 | , strong
18 | , text
19 | )
20 | import Html.Attributes
21 | exposing
22 | ( class
23 | , href
24 | )
25 | import Html.Events exposing (onClick)
26 | import Http exposing (Body)
27 | import Page.Options exposing (Msg(..))
28 | import Page.Packages exposing (Msg(..))
29 | import RemoteData exposing (RemoteData(..))
30 | import Route
31 | exposing
32 | ( Route(..)
33 | , SearchType(..)
34 | )
35 | import Search
36 | exposing
37 | ( Msg(..)
38 | , NixOSChannel
39 | , viewFlakes
40 | , viewResult
41 | , viewSearchInput
42 | )
43 |
44 |
45 |
46 | -- MODEL
47 |
48 |
{-| A flake search shows either options or packages; the active
sub-page's state is wrapped in the corresponding variant.
-}
type Model
    = OptionModel Page.Options.Model
    | PackagesModel Page.Packages.Model
52 |
53 |
{-| Initialize the flake search page from route arguments.

Reuses the previous sub-page model when its type matches the requested
search type; otherwise starts fresh (defaulting to package search).

-}
init :
    Route.SearchArgs
    -> String
    -> List NixOSChannel
    -> Maybe Model
    -> ( Model, Cmd Msg )
init searchArgs defaultNixOSChannel nixosChannels model =
    let
        -- init with respective module or with packages by default
        searchType =
            Maybe.withDefault PackageSearch searchArgs.type_

        -- reuse the existing sub-model only when its kind matches searchType
        mapEitherModel m =
            case ( searchType, m ) of
                ( OptionSearch, OptionModel model_ ) ->
                    Tuple.mapBoth OptionModel (Cmd.map OptionsMsg) <|
                        Page.Options.init searchArgs defaultNixOSChannel nixosChannels <|
                            Just model_

                ( PackageSearch, PackagesModel model_ ) ->
                    Tuple.mapBoth PackagesModel (Cmd.map PackagesMsg) <|
                        Page.Packages.init searchArgs defaultNixOSChannel nixosChannels <|
                            Just model_

                _ ->
                    default

        -- fresh sub-model for the requested search type
        default =
            case searchType of
                PackageSearch ->
                    Tuple.mapBoth PackagesModel (Cmd.map PackagesMsg) <|
                        Page.Packages.init searchArgs defaultNixOSChannel nixosChannels Nothing

                OptionSearch ->
                    Tuple.mapBoth OptionModel (Cmd.map OptionsMsg) <|
                        Page.Options.init searchArgs defaultNixOSChannel nixosChannels Nothing

        ( newModel, newCmd ) =
            Maybe.withDefault default <| Maybe.map mapEitherModel model
    in
    ( newModel
    , newCmd
    )
97 |
98 |
99 |
100 | -- UPDATE
101 |
102 |
{-| Wraps the messages of the two possible sub-pages.
-}
type Msg
    = OptionsMsg Page.Options.Msg
    | PackagesMsg Page.Packages.Msg
106 |
107 |
{-| Route a sub-page message to the matching sub-model via `Search.update`.

Mismatched message/model pairs (e.g. an options message arriving while
the packages model is active) are ignored.

-}
update :
    Browser.Navigation.Key
    -> Msg
    -> Model
    -> List NixOSChannel
    -> ( Model, Cmd Msg )
update navKey msg model nixosChannels =
    case ( msg, model ) of
        ( OptionsMsg msg_, OptionModel model_ ) ->
            case msg_ of
                Page.Options.SearchMsg subMsg ->
                    let
                        ( newModel, newCmd ) =
                            Search.update
                                Route.Flakes
                                navKey
                                subMsg
                                model_
                                nixosChannels
                    in
                    ( newModel, Cmd.map Page.Options.SearchMsg newCmd ) |> Tuple.mapBoth OptionModel (Cmd.map OptionsMsg)

        ( PackagesMsg msg_, PackagesModel model_ ) ->
            case msg_ of
                Page.Packages.SearchMsg subMsg ->
                    let
                        ( newModel, newCmd ) =
                            Search.update
                                Route.Flakes
                                navKey
                                subMsg
                                model_
                                nixosChannels
                    in
                    ( newModel, Cmd.map Page.Packages.SearchMsg newCmd ) |> Tuple.mapBoth PackagesModel (Cmd.map PackagesMsg)

        -- message for the sub-page that is not currently active: drop it
        _ ->
            ( model, Cmd.none )
146 |
147 |
148 |
149 | -- VIEW
150 |
151 |
{-| Render the flake search page for whichever sub-page is active.
-}
view :
    List NixOSChannel
    -> Model
    -> Html Msg
view nixosChannels model =
    let
        -- CSS class suffix mirroring the state of the search request
        resultStatus result =
            case result of
                RemoteData.NotAsked ->
                    "not-asked"

                RemoteData.Loading ->
                    "loading"

                RemoteData.Success _ ->
                    "success"

                RemoteData.Failure _ ->
                    "failure"

        bodyTitle =
            [ text "Search packages and options of "
            , strong []
                [ a
                    [ href "https://github.com/NixOS/nixos-search/blob/main/flakes/manual.toml" ]
                    [ text "public flakes" ]
                ]
            ]

        -- shared page scaffold; outMsg lifts Search messages into the
        -- active sub-page's Msg type
        mkBody categoryName model_ viewSuccess viewBuckets outMsg =
            div
                (List.append
                    [ class <| "search-page " ++ resultStatus model_.result ]
                    (if model_.showSort then
                        -- any click closes the opened sort dropdown
                        [ onClick (outMsg ToggleSort) ]

                     else
                        []
                    )
                )
                [ h1 [] bodyTitle
                , viewSearchInput nixosChannels outMsg categoryName Nothing model_.query
                , viewResult nixosChannels outMsg Route.Flakes categoryName model_ viewSuccess viewBuckets <|
                    viewFlakes outMsg model_.channel model_.searchType
                ]

        body =
            case model of
                OptionModel model_ ->
                    Html.map OptionsMsg <| mkBody "Options" model_ Page.Options.viewSuccess Page.Options.viewBuckets Page.Options.SearchMsg

                PackagesModel model_ ->
                    Html.map PackagesMsg <| mkBody "Packages" model_ Page.Packages.viewSuccess Page.Packages.viewBuckets Page.Packages.SearchMsg
    in
    body
207 |
208 |
209 |
210 | -- API
211 |
212 |
{-| Fire an Elasticsearch query for the given search type, decoding the
response with the matching page module's decoders and lifting the
resulting messages into this page's `Msg`.
-}
makeRequest :
    Search.Options
    -> List NixOSChannel
    -> SearchType
    -> String
    -> String
    -> Int
    -> Int
    -> Maybe String
    -> Search.Sort
    -> Cmd Msg
makeRequest options nixosChannels searchType index_id query from size maybeBuckets sort =
    let
        cmd =
            case searchType of
                PackageSearch ->
                    Search.makeRequest
                        (makeRequestBody searchType query from size maybeBuckets sort)
                        nixosChannels
                        index_id
                        Page.Packages.decodeResultItemSource
                        Page.Packages.decodeResultAggregations
                        options
                        Search.QueryResponse
                        (Just "query-packages")
                        |> Cmd.map Page.Packages.SearchMsg
                        |> Cmd.map PackagesMsg

                OptionSearch ->
                    Search.makeRequest
                        (makeRequestBody searchType query from size maybeBuckets sort)
                        nixosChannels
                        index_id
                        Page.Options.decodeResultItemSource
                        Page.Options.decodeResultAggregations
                        options
                        Search.QueryResponse
                        (Just "query-options")
                        |> Cmd.map Page.Options.SearchMsg
                        |> Cmd.map OptionsMsg
    in
    cmd
255 |
256 |
{-| Build the Elasticsearch request body for the given search type by
delegating to the matching page module. Note that the options request
does not use bucket filters.
-}
makeRequestBody : SearchType -> String -> Int -> Int -> Maybe String -> Search.Sort -> Body
makeRequestBody searchType query from size maybeBuckets sort =
    case searchType of
        PackageSearch ->
            Page.Packages.makeRequestBody query from size maybeBuckets sort

        OptionSearch ->
            Page.Options.makeRequestBody query from size sort
265 |
--------------------------------------------------------------------------------
/flake-info/examples/adaspark-offen.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
4 | "flake_resolved": {
5 | "type": "github",
6 | "owner": "fluffynukeit",
7 | "repo": "adaspark"
8 | },
9 | "flake_name": "adaspark",
10 | "flake_source": {
11 | "type": "git",
12 | "url": "github:fluffynukeit/adaspark"
13 | },
14 | "package_attr_name": "adaspark",
15 | "package_pname": "adaspark",
16 | "package_pversion": "",
17 | "package_platforms": [
18 | "x86_64-linux"
19 | ],
20 | "package_outputs": [
21 | "out"
22 | ],
23 | "package_license": {}
24 | },
25 | {
26 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
27 | "flake_resolved": {
28 | "type": "github",
29 | "owner": "fluffynukeit",
30 | "repo": "adaspark"
31 | },
32 | "flake_name": "adaspark",
33 | "flake_source": {
34 | "type": "git",
35 | "url": "github:fluffynukeit/adaspark"
36 | },
37 | "package_attr_name": "asis",
38 | "package_pname": "ASIS",
39 | "package_pversion": "gcc-10.1.0",
40 | "package_platforms": [
41 | "x86_64-linux"
42 | ],
43 | "package_outputs": [
44 | "out"
45 | ],
46 | "package_license": {}
47 | },
48 | {
49 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
50 | "flake_resolved": {
51 | "type": "github",
52 | "owner": "fluffynukeit",
53 | "repo": "adaspark"
54 | },
55 | "flake_name": "adaspark",
56 | "flake_source": {
57 | "type": "git",
58 | "url": "github:fluffynukeit/adaspark"
59 | },
60 | "package_attr_name": "aunit",
61 | "package_pname": "AUnit",
62 | "package_pversion": "20.2",
63 | "package_platforms": [
64 | "x86_64-linux"
65 | ],
66 | "package_outputs": [
67 | "out"
68 | ],
69 | "package_license": {}
70 | },
71 | {
72 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
73 | "flake_resolved": {
74 | "type": "github",
75 | "owner": "fluffynukeit",
76 | "repo": "adaspark"
77 | },
78 | "flake_name": "adaspark",
79 | "flake_source": {
80 | "type": "git",
81 | "url": "github:fluffynukeit/adaspark"
82 | },
83 | "package_attr_name": "gnat",
84 | "package_pname": "gnat-10.2.0",
85 | "package_pversion": "10.2.0",
86 | "package_platforms": [
87 | "x86_64-linux"
88 | ],
89 | "package_outputs": [
90 | "out",
91 | "man",
92 | "info"
93 | ],
94 | "package_description": "GNU Compiler Collection, version 10.2.0 (wrapper script)",
95 | "package_license": {
96 | "license_long": "GNU General Public License v3.0 or later",
97 | "license": "gpl3Plus",
98 | "license_url": "https://spdx.org/licenses/GPL-3.0-or-later.html"
99 | }
100 | },
101 | {
102 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
103 | "flake_resolved": {
104 | "type": "github",
105 | "owner": "fluffynukeit",
106 | "repo": "adaspark"
107 | },
108 | "flake_name": "adaspark",
109 | "flake_source": {
110 | "type": "git",
111 | "url": "github:fluffynukeit/adaspark"
112 | },
113 | "package_attr_name": "gnat_util",
114 | "package_pname": "gnat_util",
115 | "package_pversion": "10.1.0",
116 | "package_platforms": [
117 | "x86_64-linux"
118 | ],
119 | "package_outputs": [
120 | "out"
121 | ],
122 | "package_license": {}
123 | },
124 | {
125 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
126 | "flake_resolved": {
127 | "type": "github",
128 | "owner": "fluffynukeit",
129 | "repo": "adaspark"
130 | },
131 | "flake_name": "adaspark",
132 | "flake_source": {
133 | "type": "git",
134 | "url": "github:fluffynukeit/adaspark"
135 | },
136 | "package_attr_name": "gnatcoll-core",
137 | "package_pname": "gnatcoll-core",
138 | "package_pversion": "20.2",
139 | "package_platforms": [
140 | "x86_64-linux"
141 | ],
142 | "package_outputs": [
143 | "out"
144 | ],
145 | "package_license": {}
146 | },
147 | {
148 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
149 | "flake_resolved": {
150 | "type": "github",
151 | "owner": "fluffynukeit",
152 | "repo": "adaspark"
153 | },
154 | "flake_name": "adaspark",
155 | "flake_source": {
156 | "type": "git",
157 | "url": "github:fluffynukeit/adaspark"
158 | },
159 | "package_attr_name": "gpr",
160 | "package_pname": "gprbuild",
161 | "package_pversion": "20.2",
162 | "package_platforms": [
163 | "x86_64-linux"
164 | ],
165 | "package_outputs": [
166 | "out"
167 | ],
168 | "package_license": {}
169 | },
170 | {
171 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
172 | "flake_resolved": {
173 | "type": "github",
174 | "owner": "fluffynukeit",
175 | "repo": "adaspark"
176 | },
177 | "flake_name": "adaspark",
178 | "flake_source": {
179 | "type": "git",
180 | "url": "github:fluffynukeit/adaspark"
181 | },
182 | "package_attr_name": "spark",
183 | "package_pname": "SPARK2014",
184 | "package_pversion": "20.2",
185 | "package_platforms": [
186 | "x86_64-linux"
187 | ],
188 | "package_outputs": [
189 | "out"
190 | ],
191 | "package_license": {}
192 | },
193 | {
194 | "flake_description": "Compilers and tools for SPARK2014 Ada development",
195 | "flake_resolved": {
196 | "type": "github",
197 | "owner": "fluffynukeit",
198 | "repo": "adaspark"
199 | },
200 | "flake_name": "adaspark",
201 | "flake_source": {
202 | "type": "git",
203 | "url": "github:fluffynukeit/adaspark"
204 | },
205 | "package_attr_name": "xmlada",
206 | "package_pname": "xmlada",
207 | "package_pversion": "20.2",
208 | "package_platforms": [
209 | "x86_64-linux"
210 | ],
211 | "package_outputs": [
212 | "out"
213 | ],
214 | "package_license": {}
215 | },
216 | {
217 | "flake_description": "Offen, a fair web analytics tool",
218 | "flake_resolved": {
219 | "type": "github",
220 | "owner": "ngi-nix",
221 | "repo": "offen"
222 | },
223 | "flake_name": "offen",
224 | "flake_source": {
225 | "type": "github",
226 | "owner": "ngi-nix",
227 | "repo": "offen",
228 | "description": "Hier könnte Ihre Werbung stehen",
229 | "git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
230 | },
231 | "package_attr_name": "license_finder",
232 | "package_pname": "license_finder",
233 | "package_pversion": "",
234 | "package_platforms": [
235 | "x86_64-linux"
236 | ],
237 | "package_outputs": [
238 | "out"
239 | ],
240 | "package_license": {}
241 | },
242 | {
243 | "flake_description": "Offen, a fair web analytics tool",
244 | "flake_resolved": {
245 | "type": "github",
246 | "owner": "ngi-nix",
247 | "repo": "offen"
248 | },
249 | "flake_name": "offen",
250 | "flake_source": {
251 | "type": "github",
252 | "owner": "ngi-nix",
253 | "repo": "offen",
254 | "description": "Hier könnte Ihre Werbung stehen",
255 | "git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
256 | },
257 | "package_attr_name": "offen",
258 | "package_pname": "offen-20210115",
259 | "package_pversion": "20210115",
260 | "package_platforms": [
261 | "x86_64-linux"
262 | ],
263 | "package_outputs": [
264 | "out"
265 | ],
266 | "package_license": {}
267 | },
268 | {
269 | "flake_description": "Offen, a fair web analytics tool",
270 | "flake_resolved": {
271 | "type": "github",
272 | "owner": "ngi-nix",
273 | "repo": "offen"
274 | },
275 | "flake_name": "offen",
276 | "flake_source": {
277 | "type": "github",
278 | "owner": "ngi-nix",
279 | "repo": "offen",
280 | "description": "Hier könnte Ihre Werbung stehen",
281 | "git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
282 | },
283 | "package_attr_name": "offen-auditorium",
284 | "package_pname": "offen-auditorium",
285 | "package_pversion": "20210115",
286 | "package_platforms": [
287 | "x86_64-linux"
288 | ],
289 | "package_outputs": [
290 | "out"
291 | ],
292 | "package_license": {}
293 | },
294 | {
295 | "flake_description": "Offen, a fair web analytics tool",
296 | "flake_resolved": {
297 | "type": "github",
298 | "owner": "ngi-nix",
299 | "repo": "offen"
300 | },
301 | "flake_name": "offen",
302 | "flake_source": {
303 | "type": "github",
304 | "owner": "ngi-nix",
305 | "repo": "offen",
306 | "description": "Hier könnte Ihre Werbung stehen",
307 | "git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
308 | },
309 | "package_attr_name": "offen-script",
310 | "package_pname": "offen-script",
311 | "package_pversion": "20210115",
312 | "package_platforms": [
313 | "x86_64-linux"
314 | ],
315 | "package_outputs": [
316 | "out"
317 | ],
318 | "package_license": {}
319 | },
320 | {
321 | "flake_description": "Offen, a fair web analytics tool",
322 | "flake_resolved": {
323 | "type": "github",
324 | "owner": "ngi-nix",
325 | "repo": "offen"
326 | },
327 | "flake_name": "offen",
328 | "flake_source": {
329 | "type": "github",
330 | "owner": "ngi-nix",
331 | "repo": "offen",
332 | "description": "Hier könnte Ihre Werbung stehen",
333 | "git_ref": "4052febf151d60aa4352fa1960cf3ae088f600aa"
334 | },
335 | "package_attr_name": "offen-vault",
336 | "package_pname": "offen-vault",
337 | "package_pversion": "20210115",
338 | "package_platforms": [
339 | "x86_64-linux"
340 | ],
341 | "package_outputs": [
342 | "out"
343 | ],
344 | "package_license": {}
345 | }
346 | ]
347 |
--------------------------------------------------------------------------------
/frontend/src/index.scss:
--------------------------------------------------------------------------------
1 | /* ------------------------------------------------------------------------- */
2 | /* -- Utils ---------------------------------------------------------------- */
3 | /* ------------------------------------------------------------------------- */
4 |
// Terminal-style block: light text on a dark background, flush margins.
@mixin terminal() {
  background: #333;
  color: #fff;
  margin: 0;
}
10 |
11 |
// Shared behavior for a search result entry: the "show more" link is
// hidden until the item is hovered or opened.
@mixin search-result-item() {
  .result-item-show-more-wrapper {
    text-align: center;
  }

  // show longer details link
  .result-item-show-more {
    margin: 0 auto;
    display: none;
    text-align: center;
    text-decoration: none;
    line-height: 1.5em;
    color: #666;
    background: #FFF;
    padding: 0 1em;
    position: relative;
    top: 0.75em;
    outline: none;
  }
  // reveal the link when the item is opened or hovered
  &.opened,
  &:hover {
    padding-bottom: 0;

    .result-item-show-more {
      display: inline-block;
      padding-top: 0.5em;
    }
  }
}
41 |
.package-name {
  // a single click/tap selects the whole attribute name for copying
  user-select: all;
}
45 |
46 | /* ------------------------------------------------------------------------- */
47 | /* -- Layout --------------------------------------------------------------- */
48 | /* ------------------------------------------------------------------------- */
49 |
body {
  position: relative;
  min-height: 100vh;
  overflow-y: auto;

  // the app's root mount node fills the viewport as well
  & > div:first-child {
    position: relative;
    min-height: 100vh;
  }
}
60 |
// code snippets rendered as selectable blocks
.code-block {
  display: block;
  cursor: text;
}
65 |
// prefix shell commands with a "$ " prompt marker
.shell-command:before {
  content: "$ ";
}
69 |
#content {
  // leave room for the absolutely positioned footer (4rem tall)
  padding-bottom: 4rem;
}
73 |
// footer pinned to the page bottom; #content reserves its 4rem height
footer {
  position: absolute;
  bottom: 0;
  width: 100%;
  height: 4rem;
}
80 |
// top navigation bar: logo sizing and nav item alignment
header .navbar.navbar-static-top {
  .brand {
    padding-bottom: 0;
  }
  img.logo {
    margin-top: -5px;
    padding-right: 5px;
    line-height: 25px;
    height: 25px;
  }
  ul.nav > li {
    line-height: 20px;
    sup {
      margin-left: 0.5em;
    }
  }
}
98 |
// Search page
100 | .search-page {
101 |
102 | &.not-asked {
103 |
104 | & > h1 {
105 | margin-top: 2.5em;
106 | margin-bottom: 0.8em;
107 | }
108 | }
109 |
// Search section title
111 | & > h1 {
112 | font-weight: normal;
113 | font-size: 2.3em;
114 |
115 | &:before {
116 | content: "\2315";
117 | display: inline-block;
118 | font-size: 1.5em;
119 | margin-right: 0.2em;
120 | -moz-transform: scale(-1, 1);
121 | -webkit-transform: scale(-1, 1);
122 | -o-transform: scale(-1, 1);
123 | -ms-transform: scale(-1, 1);
124 | transform: scale(-1, 1);
125 | }
126 | }
127 |
128 | // Search input section
129 | & > .search-input {
130 |
131 | // Search Input and Button
132 | & > div:nth-child(1) {
133 | display: grid;
134 | grid-template-columns: auto 8em;
135 |
136 | & > div > input {
137 | font-size: 18px;
138 | height: 40px;
139 | width: 100%;
140 | }
141 |
142 | & > button {
143 | font-size: 24px;
144 | height: 50px;
145 | min-width: 4em;
146 | }
147 | }
148 |
149 | // List of channels
150 | & > div:nth-child(2) {
151 | margin-bottom: 0.5em;
152 |
153 | // "Channels: " label
154 | & > div > h4 {
155 | display: inline;
156 | vertical-align: middle;
157 | font-size: 1.2em;
158 | margin-left: 0.2em;
159 | }
160 | }
161 | }
162 |
163 | // Loader during loading the search results
164 | & > .loader-wrapper > h2 {
165 | position: absolute;
166 | top: 3em;
167 | width: 100%;
168 | text-align: center;
169 | }
170 |
171 | & > .search-no-results {
172 | padding: 2em 1em;
173 | text-align: center;
174 | margin-bottom: 2em;
175 |
176 | & > h2 {
177 | margin-top: 0;
178 | }
179 | }
180 |
181 |
182 | .search-result-button {
183 | list-style: none;
184 | margin: 0;
185 | padding: 0;
186 |
187 | & > li {
188 | display: inline-block;
189 |
190 | &:first-child:not(:last-child):after {
191 | content: "→";
192 | margin: 0 0.2em;
193 | }
194 |
195 | & > a:hover {
196 | text-decoration: underline;
197 | }
198 | }
199 | }
200 |
201 | // Buckets
202 | ul.search-sidebar {
203 | width: 25em;
204 |
205 | list-style: none;
206 | margin: 0 1em 0 0;
207 |
208 | & > li {
209 | margin-bottom: 1em;
210 | border: 1px solid #ccc;
211 | padding: 1em;
212 | border-radius: 4px;
213 |
214 | & > ul {
215 | list-style: none;
216 | margin: 0;
217 |
218 | & > li {
219 | margin-bottom: 0.2em;
220 |
221 | &.header {
222 | font-size: 1.2em;
223 | font-weight: bold;
224 | margin-bottom: 0.5em;
225 | }
226 |
227 | & > a {
228 | display: grid;
229 | grid-template-columns: auto max-content;
230 | color: #333;
231 | padding: 0.5em 0.5em 0.5em 1em;
232 | text-decoration: none;
233 |
234 | &:hover {
235 | text-decoration: none;
236 | background: #eee;
237 | border-radius: 4px;
238 | }
239 |
240 | & > span:first-child {
241 | overflow: hidden;
242 | }
243 | & > span:last-child {
244 | text-align: right;
245 | margin-left: 0.3em;
246 | }
247 |
248 | &.selected {
249 | background: #0081c2;
250 | color: #FFF;
251 | border-radius: 4px;
252 | position: relative;
253 | & > span:last-child {
254 | display: none;
255 |
256 | }
257 | }
258 |
259 | & .close {
260 | opacity: 1;
261 | text-shadow: none;
262 | color: inherit;
263 | font-size: inherit;
264 | padding-left: .5em;
265 | padding-right: .5em;
266 | }
267 | }
268 | }
269 | }
270 | }
271 | }
272 |
273 |
274 | & > .search-results {
275 | display: flex;
276 | flex-direction: row;
277 |
278 | // Results section
279 | & > div {
280 | width: 100%;
281 |
282 | // Search results header
283 | & > :nth-child(1) {
284 |
285 | // Dropdown to show sorting options
286 | & > div:nth-child(1) {
287 |
288 | & > button {
289 | & > .selected {
290 | margin-right: 0.5em;
291 | }
292 | }
293 |
294 | & > ul > li {
295 |
296 | & > a {
297 | padding: 3px 10px;
298 | }
299 |
300 | & > a:before {
301 | display: inline-block;
302 | content: " ";
303 | width: 24.5px;
304 | }
305 |
306 | &.selected > a:before {
307 | content: "\2714";
308 | }
309 | }
310 |
311 | & > ul > li.header {
312 | font-weight: bold;
313 | padding: 3px 10px 0 10px;
314 | }
315 |
316 | & > ul > li.header:before,
317 | & > ul > li.divider:before {
318 | display: none;
319 | }
320 | }
321 |
322 | // Text that displays number of results
323 | & > div:nth-child(2) {
324 | font-size: 1.7em;
325 | line-height: 1.3em;
326 |
327 | & > p {
328 | font-size: 0.7em;
329 | }
330 | }
331 | }
332 |
333 | // Search results list
334 | & > :nth-child(2) {
335 | list-style: none;
336 | margin: 2em 0 0 0;
337 |
338 | // Result item
339 | & > li {
340 | border-bottom: 1px solid #ccc;
341 | padding-bottom: 2em;
342 | margin-bottom: 2em;
343 |
344 | &:last-child {
345 | border-bottom: 0;
346 | }
347 |
348 | // Attribute name or option name
349 | & > :nth-child(1) {
350 | background: inherit;
351 | border: 0;
352 | padding: 0;
353 | color: #08c;
354 | font-size: 1.5em;
355 | margin-bottom: 0.5em;
356 | text-align: left;
357 | display: block;
358 | }
359 |
360 | &.package {
361 | @include search-result-item;
362 |
363 | // Description
364 | & > :nth-child(2) {
365 | font-size: 1.2em;
366 | margin-bottom: 0.5em;
367 | text-align: left;
368 | }
369 |
// short details of a package
371 | & > :nth-child(3) {
372 | color: #666;
373 | list-style: none;
374 | text-align: left;
375 | margin: 0;
376 |
377 | & > li {
378 | display: inline-block;
379 | margin-right: 1em;
380 | }
381 | & > li:last-child {
382 | margin-right: 0;
383 | }
384 | }
385 |
386 | // longer details of a pacakge
387 | & > :nth-child(5) {
388 | margin: 2em 0 1em 1em;
389 | text-align: left;
390 |
391 | // long description of a package
392 | & > :nth-child(1) {
393 | margin-top: 1em;
394 | }
395 |
396 | // how to install a package
397 | & > :nth-child(2) {
398 |
399 | h4 {
400 | font-size: 1.2em;
401 | line-height: 1em;
402 | float: left;
403 | }
404 |
405 | ul.nav-tabs {
406 | margin: 0;
407 |
408 | & > li > a {
409 | margin-right: 0;
410 | }
411 | }
412 |
413 | div.tab-content {
414 | padding: 1em;
415 | border: 1px solid #ddd;
416 | border-top: 0;
417 | }
418 |
419 | pre {
420 | @include terminal;
421 | }
422 |
423 | }
424 |
425 | // maintainers and platforms
426 | & > :nth-child(3) {
427 | margin-top: 1em;
428 | display: grid;
429 | grid-template-columns: auto auto;
430 | }
431 | }
432 | }
433 |
434 | &.option {
435 | margin: 0;
436 | padding: 0;
437 |
438 | & > :nth-child(1) {
439 | padding: 0.5em 0;
440 | }
441 |
442 | // short details of a pacakge
443 | & > :nth-child(2) {
444 | margin: 2em 0 1em 1em;
445 | display: grid;
446 | grid-template-columns: 100px 1fr;
447 | column-gap: 1em;
448 | row-gap: 0.5em;
449 |
450 | & > div:nth-child(2n+1) {
451 | font-weight: bold;
452 | text-align: right;
453 | }
454 |
455 | & > div:nth-child(2n) {
456 | pre {
457 | background: transparent;
458 | margin: 0;
459 | padding: 0;
460 | border: 0;
461 | vertical-align: inherit;
462 | display: inline;
463 | }
464 |
465 | pre code {
466 | background: #333;
467 | color: #fff;
468 | padding: 0.5em
469 | }
470 | }
471 |
472 | }
473 | }
474 | }
475 |
476 | }
477 |
478 | // Search results footer
479 | & > :nth-child(3) {
480 | margin-top: 1em;
481 |
482 | & > ul > li > a {
483 | cursor: pointer;
484 | margin: 0 2px;
485 | }
486 | }
487 | }
488 | }
489 | }
490 |
/* ------------------------------------------------------------------------- */
/* -- Loader --------------------------------------------------------------- */
/* ------------------------------------------------------------------------- */

.loader-wrapper {
  height: 200px;
  overflow: hidden;
  position: relative;
}
/* Three-bar spinner: the element and its two pseudo-elements run the same
   load1 keyframes, phase-shifted via negative animation-delay values. */
.loader,
.loader:before,
.loader:after {
  background: transparent;
  -webkit-animation: load1 1s infinite ease-in-out;
  animation: load1 1s infinite ease-in-out;
  width: 1em;
  height: 4em;
}
.loader {
  color: #000000;
  text-indent: -9999em; /* hide any fallback text inside the element */
  margin: 88px auto;
  position: relative;
  font-size: 11px;
  /* translateZ(0) promotes the element to its own compositing layer */
  -webkit-transform: translateZ(0);
  -ms-transform: translateZ(0);
  transform: translateZ(0);
  -webkit-animation-delay: -0.16s;
  animation-delay: -0.16s;
}
.loader:before,
.loader:after {
  position: absolute;
  top: 0;
  content: '';
}
.loader:before {
  left: -1.5em;
  -webkit-animation-delay: -0.32s;
  animation-delay: -0.32s;
}
.loader:after {
  left: 1.5em;
}
/* Each bar grows and casts an upward box-shadow at the animation midpoint. */
@keyframes load1 {
  0%,
  80%,
  100% {
    box-shadow: 0 0;
    height: 4em;
  }
  40% {
    box-shadow: 0 -2em;
    height: 5em;
  }
}
547 |
--------------------------------------------------------------------------------
/frontend/src/Page/Options.elm:
--------------------------------------------------------------------------------
1 | module Page.Options exposing
2 | ( Model
3 | , Msg(..)
4 | , ResultAggregations
5 | , ResultItemSource
6 | , decodeResultAggregations
7 | , decodeResultItemSource
8 | , init
9 | , makeRequest
10 | , makeRequestBody
11 | , update
12 | , view
13 | , viewBuckets
14 | , viewSuccess
15 | )
16 |
17 | import Browser.Navigation
18 | import Html
19 | exposing
20 | ( Html
21 | , a
22 | , code
23 | , div
24 | , li
25 | , pre
26 | , span
27 | , strong
28 | , text
29 | , ul
30 | )
31 | import Html.Attributes
32 | exposing
33 | ( class
34 | , classList
35 | , href
36 | , target
37 | )
38 | import Html.Events
39 | exposing
40 | ( onClick
41 | )
42 | import Http exposing (Body)
43 | import Json.Decode
44 | import Json.Decode.Pipeline
45 | import Route exposing (SearchType)
46 | import Search
47 | exposing
48 | ( Details
49 | , NixOSChannel
50 | , decodeResolvedFlake
51 | )
52 | import Utils
53 |
54 |
55 |
56 | -- MODEL
57 |
58 |
-- This page is a thin specialization of the generic Search page.
type alias Model =
    Search.Model ResultItemSource ResultAggregations


-- One option hit as decoded from the search backend
-- (see decodeResultItemSource for the field mapping).
type alias ResultItemSource =
    { name : String
    , description : Maybe String
    , type_ : Maybe String
    , default : Maybe String
    , example : Maybe String
    , source : Maybe String

    -- flake
    , flake : Maybe (List String)
    , flakeName : Maybe String
    , flakeDescription : Maybe String
    , flakeUrl : Maybe String
    }


-- Aggregations returned alongside the hits; only the total count is used.
type alias ResultAggregations =
    { all : AggregationsAll
    }


type alias AggregationsAll =
    { doc_count : Int
    }
87 |
88 |
init :
    Route.SearchArgs
    -> String
    -> List NixOSChannel
    -> Maybe Model
    -> ( Model, Cmd Msg )
init searchArgs defaultNixOSChannel nixosChannels model =
    -- Delegate to the generic search initializer and lift its command
    -- into this page's Msg type.
    Search.init searchArgs defaultNixOSChannel nixosChannels model
        |> Tuple.mapSecond (Cmd.map SearchMsg)
103 |
104 |
105 |
106 | -- UPDATE
107 |
108 |
-- Every message on this page wraps a generic Search message;
-- the page adds no events of its own.
type Msg
    = SearchMsg (Search.Msg ResultItemSource ResultAggregations)
111 |
112 |
update :
    Browser.Navigation.Key
    -> Msg
    -> Model
    -> List NixOSChannel
    -> ( Model, Cmd Msg )
update navKey msg model nixosChannels =
    -- Unwrap the search message, let the generic Search module handle it,
    -- then re-wrap the resulting command in this page's Msg type.
    case msg of
        SearchMsg subMsg ->
            Search.update Route.Options navKey subMsg model nixosChannels
                |> Tuple.mapSecond (Cmd.map SearchMsg)
132 |
133 |
134 |
135 | -- VIEW
136 |
137 |
-- Render the options search page by instantiating the generic search view
-- with this page's route, heading, and result/bucket renderers.
view :
    List NixOSChannel
    -> Model
    -> Html Msg
view nixosChannels model =
    Search.view { toRoute = Route.Options, categoryName = "options" }
        [ text "Search more than "
        , strong [] [ text "10 000 options" ]
        ]
        nixosChannels
        model
        viewSuccess
        viewBuckets
        SearchMsg
        []
153 |
154 |
-- Options expose no facet buckets, so the sidebar is always empty.
viewBuckets :
    Maybe String
    -> Search.SearchResult ResultItemSource ResultAggregations
    -> List (Html Msg)
viewBuckets _ _ =
    []
161 |
162 |
viewSuccess :
    List NixOSChannel
    -> String
    -> Details
    -> Maybe String
    -> List (Search.ResultItem ResultItemSource)
    -> Html Msg
viewSuccess nixosChannels channel showInstallDetails show hits =
    -- Render each hit as one entry of a flat result list.
    hits
        |> List.map (viewResultItem nixosChannels channel showInstallDetails show)
        |> ul []
176 |
177 |
-- Render a single option result: a clickable title row (optionally prefixed
-- with its flake origin) plus an expandable details section that is shown
-- only when `show` equals this option's name.
viewResultItem :
    List NixOSChannel
    -> String
    -> Details
    -> Maybe String
    -> Search.ResultItem ResultItemSource
    -> Html Msg
viewResultItem nixosChannels channel _ show item =
    let
        asPre value =
            pre [] [ text value ]

        asPreCode value =
            div [] [ pre [] [ code [ class "code-block" ] [ text value ] ] ]

        -- Label/value rows for the opened item (Name, Description, Type,
        -- Default, Example, Declared in). Optional fields contribute no rows
        -- when absent. Search.trapClick keeps clicks inside the details from
        -- bubbling up and closing them.
        showDetails =
            if Just item.source.name == show then
                Just <|
                    div [ Html.Attributes.map SearchMsg Search.trapClick ] <|
                        [ div [] [ text "Name" ]
                        , div [] [ asPreCode item.source.name ]
                        ]
                            ++ (item.source.description
                                    |> Maybe.andThen Utils.showHtml
                                    |> Maybe.map
                                        (\description ->
                                            [ div [] [ text "Description" ]
                                            , div [] description
                                            ]
                                        )
                                    |> Maybe.withDefault []
                               )
                            ++ (item.source.type_
                                    |> Maybe.map
                                        (\type_ ->
                                            [ div [] [ text "Type" ]
                                            , div [] [ asPre type_ ]
                                            ]
                                        )
                                    |> Maybe.withDefault []
                               )
                            ++ (item.source.default
                                    |> Maybe.map
                                        (\default ->
                                            [ div [] [ text "Default" ]
                                            , div [] <| Maybe.withDefault [ asPreCode default ] (Utils.showHtml default)
                                            ]
                                        )
                                    |> Maybe.withDefault []
                               )
                            ++ (item.source.example
                                    |> Maybe.map
                                        (\example ->
                                            [ div [] [ text "Example" ]
                                            , div [] <| Maybe.withDefault [ asPreCode example ] (Utils.showHtml example)
                                            ]
                                        )
                                    |> Maybe.withDefault []
                               )
                            ++ [ div [] [ text "Declared in" ]
                               , div [] <| findSource nixosChannels channel item.source
                               ]

            else
                Nothing

        toggle =
            SearchMsg (Search.ShowDetails item.source.name)

        isOpen =
            Just item.source.name == show

        -- Link to the defining flake, shown before the option name:
        -- one element means just the flake, two mean flake#module.
        flakeOrNixpkgs =
            let
                mkLink flake url =
                    a [ href url ] [ text flake ]
            in
            case ( item.source.flake, item.source.flakeUrl ) of
                -- it's a flake
                ( Just (flake :: []), Just url ) ->
                    Just
                        [ li [] [ mkLink flake url ]
                        ]

                ( Just (flake :: moduleName :: []), Just url ) ->
                    Just [ li [] [ mkLink flake url, text "#", text moduleName ] ]

                _ ->
                    Nothing
    in
    li
        [ class "option"
        , classList [ ( "opened", isOpen ) ]
        , Search.elementId item.source.name
        ]
    <|
        List.filterMap identity
            [ Just <|
                ul [ class "search-result-button" ]
                    (List.append
                        (flakeOrNixpkgs |> Maybe.withDefault [])
                        [ li []
                            [ a
                                [ onClick toggle
                                , href ""
                                ]
                                [ text item.source.name ]
                            ]
                        ]
                    )
            , showDetails
            ]
290 |
291 |
-- Render where an option is declared: a link into the defining flake,
-- a GitHub link into nixpkgs for the matched channel, or plain text
-- when neither can be resolved.
findSource :
    List NixOSChannel
    -> String
    -> ResultItemSource
    -> List (Html a)
findSource nixosChannels channel source =
    let
        githubUrlPrefix branch =
            "https://github.com/NixOS/nixpkgs/blob/" ++ branch ++ "/"

        -- Drop the "source/" prefix the evaluator puts on declaration paths.
        cleanPosition value =
            if String.startsWith "source/" value then
                String.dropLeft 7 value

            else
                value

        -- Turn "path/to/file.nix:123" into a GitHub deep link
        -- ("...file.nix#L123") on the channel's branch.
        -- NOTE(review): this branch does not apply cleanPosition to the
        -- path before building the URL — confirm whether "source/" can
        -- appear here and break the link.
        asGithubLink value =
            case List.head (List.filter (\x -> x.id == channel) nixosChannels) of
                Just channelDetails ->
                    a
                        [ href <| githubUrlPrefix channelDetails.branch ++ (value |> String.replace ":" "#L")
                        , target "_blank"
                        ]
                        [ text value ]

                Nothing ->
                    text <| cleanPosition value

        sourceFile =
            Maybe.map asGithubLink source.source

        -- Prefer flake information when present; fall back to the nixpkgs
        -- source link, or Nothing when the flake list has an unexpected shape.
        flakeOrNixpkgs : Maybe (List (Html a))
        flakeOrNixpkgs =
            case ( source.flake, source.flakeUrl ) of
                -- it's a flake
                ( Just (name :: attrs), Just flakeUrl_ ) ->
                    let
                        module_ =
                            Maybe.withDefault "(default)" <| Maybe.map (\m -> "(Module: " ++ m ++ ")") <| List.head attrs
                    in
                    Just <|
                        List.append
                            (Maybe.withDefault [] <| Maybe.map (\sourceFile_ -> [ sourceFile_, span [] [ text " in " ] ]) sourceFile)
                            [ span [] [ text "Flake: " ]
                            , a [ href flakeUrl_ ] [ text <| name ++ module_ ]
                            ]

                ( Nothing, _ ) ->
                    Maybe.map (\l -> [ l ]) sourceFile

                _ ->
                    Nothing
    in
    Maybe.withDefault [ span [] [ text "Not Found" ] ] flakeOrNixpkgs
347 |
348 |
349 |
350 | -- API
351 |
352 |
-- Build and send the search request against the options index.
-- The SearchType argument and the bucket query argument are ignored
-- because options have no facet buckets.
makeRequest :
    Search.Options
    -> List NixOSChannel
    -> SearchType
    -> String
    -> String
    -> Int
    -> Int
    -> Maybe String
    -> Search.Sort
    -> Cmd Msg
makeRequest options nixosChannels _ channel query from size _ sort =
    Search.makeRequest
        (makeRequestBody query from size sort)
        nixosChannels
        channel
        decodeResultItemSource
        decodeResultAggregations
        options
        Search.QueryResponse
        (Just "query-options")
        |> Cmd.map SearchMsg
375 |
376 |
-- Build the query body for the options index.
-- The trailing list pairs each queried field with its score boost
-- (presumably Elasticsearch field boosts — confirm in Search.makeRequestBody);
-- the meaning of the other positional arguments is defined there as well.
makeRequestBody : String -> Int -> Int -> Search.Sort -> Body
makeRequestBody query from size sort =
    Search.makeRequestBody
        (String.trim query)
        from
        size
        sort
        "option"
        "option_name"
        []
        []
        []
        "option_name"
        [ ( "option_name", 6.0 )
        , ( "option_name_query", 3.0 )
        , ( "option_description", 1.0 )
        , ( "flake_name", 0.5 )
        ]
395 |
396 |
397 |
398 | -- JSON
399 |
400 |
-- Decode one hit into ResultItemSource; every field except the option name
-- is optional and defaults to Nothing when absent. Field order must match
-- the ResultItemSource record declaration.
decodeResultItemSource : Json.Decode.Decoder ResultItemSource
decodeResultItemSource =
    Json.Decode.succeed ResultItemSource
        |> Json.Decode.Pipeline.required "option_name" Json.Decode.string
        |> Json.Decode.Pipeline.optional "option_description" (Json.Decode.map Just Json.Decode.string) Nothing
        |> Json.Decode.Pipeline.optional "option_type" (Json.Decode.map Just Json.Decode.string) Nothing
        |> Json.Decode.Pipeline.optional "option_default" (Json.Decode.map Just Json.Decode.string) Nothing
        |> Json.Decode.Pipeline.optional "option_example" (Json.Decode.map Just Json.Decode.string) Nothing
        |> Json.Decode.Pipeline.optional "option_source" (Json.Decode.map Just Json.Decode.string) Nothing
        |> Json.Decode.Pipeline.optional "option_flake"
            (Json.Decode.map Just <| Json.Decode.list Json.Decode.string)
            Nothing
        |> Json.Decode.Pipeline.optional "flake_name" (Json.Decode.map Just Json.Decode.string) Nothing
        |> Json.Decode.Pipeline.optional "flake_description" (Json.Decode.map Just Json.Decode.string) Nothing
        |> Json.Decode.Pipeline.optional "flake_resolved" (Json.Decode.map Just decodeResolvedFlake) Nothing
416 |
417 |
decodeResultAggregations : Json.Decode.Decoder ResultAggregations
decodeResultAggregations =
    -- The aggregations payload nests everything under the "all" key.
    Json.Decode.field "all" decodeResultAggregationsAll
        |> Json.Decode.map ResultAggregations
422 |
423 |
decodeResultAggregationsAll : Json.Decode.Decoder AggregationsAll
decodeResultAggregationsAll =
    -- Only the total document count is extracted from the bucket.
    Json.Decode.field "doc_count" Json.Decode.int
        |> Json.Decode.map AggregationsAll
428 |
--------------------------------------------------------------------------------
/flake-info/src/data/import.rs:
--------------------------------------------------------------------------------
1 | use std::collections::HashMap;
2 | use std::fmt;
3 | use std::marker::PhantomData;
4 | use std::{path::PathBuf, str::FromStr};
5 |
6 | use clap::arg_enum;
7 | use log::warn;
8 | use serde::de::{self, MapAccess, Visitor};
9 | use serde::{Deserialize, Deserializer, Serialize, Serializer};
10 | use serde_json::Value;
11 |
12 | use super::pandoc::PandocExt;
13 | use super::prettyprint::print_value;
14 | use super::system::System;
15 | use super::utility::{Flatten, OneOrMany};
16 |
17 | /// Holds information about a specific derivation
18 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
19 | #[serde(tag = "entry_type", rename_all = "lowercase")]
20 | pub enum FlakeEntry {
21 | /// A package as it may be defined in a flake
22 | ///
23 | /// Note: As flakes do not enforce any particular structure to be necessarily
24 | /// present, the data represented is an idealization that _should_ match in
25 | /// most cases and is open to extension.
26 | Package {
27 | attribute_name: String,
28 | name: String,
29 | version: String,
30 | platforms: Vec,
31 | outputs: Vec,
32 | default_output: String,
33 | description: Option,
34 | license: Option>>,
35 | },
36 | /// An "application" that can be called using nix run <..>
37 | App {
38 | bin: Option,
39 | attribute_name: String,
40 | platforms: Vec,
41 | app_type: Option,
42 | },
43 | /// an option defined in a module of a flake
44 | Option(NixOption),
45 | }
46 |
47 | /// The representation of an option that is part of some module and can be used
48 | /// in some nixos configuration
49 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
50 | pub struct NixOption {
51 | /// Location of the defining module(s)
52 | pub declarations: Vec,
53 |
54 | pub description: Option,
55 | pub name: String,
56 |
57 | #[serde(rename = "type")]
58 | /// Nix generated description of the options type
59 | pub option_type: Option,
60 | #[serde(deserialize_with = "optional_field", default)]
61 | #[serde(skip_serializing_if = "Option::is_none")]
62 | pub default: Option,
63 | #[serde(deserialize_with = "optional_field", default)]
64 | #[serde(skip_serializing_if = "Option::is_none")]
65 | pub example: Option,
66 |
67 | /// If defined in a flake, contains defining flake and optionally a module
68 | pub flake: Option,
69 | }
70 |
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ModulePath {
    /// A module taken from `<flake>.nixosModule`.
    /// JSON representation is a list, therefore use a 1-Tuple as representation
    DefaultModule((String,)),
    /// A module taken from `<flake>.nixosModules.<module>`.
    NamedModule((String, String)),
}
80 |
/// Option description as emitted by the nixpkgs tooling: either an explicitly
/// tagged markup value or a plain string (rendered as DocBook on serialization;
/// see the `Serialize` impl below).
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(untagged)]
pub enum DocString {
    DocFormat(DocFormat),
    String(String),
}

/// Explicitly tagged documentation markup
/// (JSON shape: `{"_type": "mdDoc", "text": ...}`).
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(tag = "_type", content = "text")]
pub enum DocFormat {
    #[serde(rename = "mdDoc")]
    MarkdownDoc(String),
}

/// A default/example value of an option: either a tagged literal or a raw
/// JSON value.
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(untagged)]
pub enum DocValue {
    Literal(Literal),
    Value(Value),
}

/// Tagged literal values (JSON shape: `{"_type": "literalExpression", "text": ...}`).
#[derive(Debug, Clone, PartialEq, Deserialize)]
#[serde(tag = "_type", content = "text")]
pub enum Literal {
    #[serde(rename = "literalExpression", alias = "literalExample")]
    LiteralExpression(String),
    #[serde(rename = "literalDocBook")]
    LiteralDocBook(String),
    #[serde(rename = "literalMD")]
    LiteralMarkdown(String),
}
112 |
113 | impl Serialize for DocString {
114 | fn serialize(&self, serializer: S) -> Result
115 | where
116 | S: Serializer,
117 | {
118 | match self {
119 | DocString::String(db) => {
120 | serializer.serialize_str(&db.render_docbook().unwrap_or_else(|e| {
121 | warn!("Could not render DocBook content: {}", e);
122 | db.to_owned()
123 | }))
124 | }
125 | DocString::DocFormat(DocFormat::MarkdownDoc(md)) => {
126 | serializer.serialize_str(&md.render_markdown().unwrap_or_else(|e| {
127 | warn!("Could not render Markdown content: {}", e);
128 | md.to_owned()
129 | }))
130 | }
131 | }
132 | }
133 | }
134 |
135 | impl Serialize for DocValue {
136 | fn serialize(&self, serializer: S) -> Result
137 | where
138 | S: Serializer,
139 | {
140 | match self {
141 | DocValue::Literal(Literal::LiteralExpression(s)) => serializer.serialize_str(&s),
142 | DocValue::Literal(Literal::LiteralDocBook(db)) => {
143 | serializer.serialize_str(&db.render_docbook().unwrap_or_else(|e| {
144 | warn!("Could not render DocBook content: {}", e);
145 | db.to_owned()
146 | }))
147 | }
148 | DocValue::Literal(Literal::LiteralMarkdown(md)) => {
149 | serializer.serialize_str(&md.render_markdown().unwrap_or_else(|e| {
150 | warn!("Could not render Markdown content: {}", e);
151 | md.to_owned()
152 | }))
153 | }
154 | DocValue::Value(v) => serializer.serialize_str(&print_value(v.to_owned())),
155 | }
156 | }
157 | }
158 |
159 | /// Package as defined in nixpkgs
160 | /// These packages usually have a "more" homogenic structure that is given by
161 | /// nixpkgs
162 | /// note: This is the parsing module that deals with nested input. A flattened,
163 | /// unified representation can be found in [crate::data::export::Derivation]
164 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
165 | pub struct Package {
166 | pub pname: String,
167 | pub version: String,
168 | #[serde(default)]
169 | pub outputs: HashMap>,
170 | #[serde(rename = "outputName", default)]
171 | pub default_output: Option,
172 | pub system: String,
173 | #[serde(default)]
174 | pub meta: Meta,
175 | }
176 |
/// The nixpkgs output lists attribute names as keys of a map.
/// Name and Package definition are combined using this struct
#[derive(Debug, Clone)]
pub enum NixpkgsEntry {
    Derivation { attribute: String, package: Package },
    Option(NixOption),
}
184 |
185 | /// Most information about packages in nixpkgs is contained in the meta key
186 | /// This struct represents a subset of that metadata
187 | #[derive(Debug, Clone, PartialEq, Default, Serialize, Deserialize)]
188 | pub struct Meta {
189 | pub license: Option>>,
190 | pub maintainers: Option>,
191 | pub homepage: Option>,
192 | pub platforms: Option>,
193 | pub position: Option,
194 | pub description: Option,
195 | #[serde(rename = "longDescription")]
196 | pub long_description: Option,
197 | }
198 |
199 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
200 | #[serde(untagged)]
201 | pub enum Maintainer {
202 | Full {
203 | name: Option,
204 | github: Option,
205 | email: Option,
206 | },
207 | Simple(String),
208 | }
209 |
arg_enum! {
    /// The type of derivation (placed in `packages.<system>` or `apps.<system>`)
    /// Used to command the extraction script
    #[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
    pub enum Kind {
        App,
        Package,
        Option,
        All,
    }
}
221 |
222 | impl AsRef for Kind {
223 | fn as_ref(&self) -> &str {
224 | match self {
225 | Kind::App => "apps",
226 | Kind::Package => "packages",
227 | Kind::Option => "options",
228 | Kind::All => "all",
229 | }
230 | }
231 | }
232 |
impl Default for Kind {
    /// Extract everything unless a specific kind is requested.
    fn default() -> Self {
        Kind::All
    }
}
238 |
239 | #[derive(Debug, Clone, PartialEq, Serialize)]
240 | pub struct StringOrStruct(pub T);
241 |
242 | impl<'de, T> Deserialize<'de> for StringOrStruct
243 | where
244 | T: Deserialize<'de> + FromStr,
245 | {
246 | fn deserialize(deserializer: D) -> Result
247 | where
248 | D: Deserializer<'de>,
249 | {
250 | Ok(StringOrStruct(string_or_struct(deserializer)?))
251 | }
252 | }
253 |
254 | /// Different representations of the licence attribute
255 | #[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
256 | #[serde(untagged)]
257 | pub enum License {
258 | None {
259 | #[serde(skip_serializing)]
260 | license: (),
261 | },
262 | Simple {
263 | license: String,
264 | },
265 | #[allow(non_snake_case)]
266 | Full {
267 | fullName: String,
268 | // shortName: String,
269 | url: Option,
270 | },
271 | Url {
272 | url: String,
273 | },
274 | }
275 |
impl Default for License {
    /// Absent license information defaults to the empty `None` variant.
    fn default() -> Self {
        License::None { license: () }
    }
}
281 |
282 | impl FromStr for License {
283 | // This implementation of `from_str` can never fail, so use the impossible
284 | // `Void` type as the error type.
285 | type Err = anyhow::Error;
286 |
287 | fn from_str(s: &str) -> Result {
288 | Ok(License::Simple {
289 | license: s.to_string(),
290 | })
291 | }
292 | }
293 |
294 | /// Deserialization helper that parses an item using either serde or fromString
295 | fn string_or_struct<'de, T, D>(deserializer: D) -> Result
296 | where
297 | T: Deserialize<'de> + FromStr,
298 | D: Deserializer<'de>,
299 | {
300 | // This is a Visitor that forwards string types to T's `FromStr` impl and
301 | // forwards map types to T's `Deserialize` impl. The `PhantomData` is to
302 | // keep the compiler from complaining about T being an unused generic type
303 | // parameter. We need T in order to know the Value type for the Visitor
304 | // impl.
305 | struct StringOrStruct(PhantomData T>);
306 |
307 | impl<'de, T> Visitor<'de> for StringOrStruct
308 | where
309 | T: Deserialize<'de> + FromStr,
310 | {
311 | type Value = T;
312 |
313 | fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
314 | formatter.write_str("string or map")
315 | }
316 |
317 | fn visit_str(self, value: &str) -> Result
318 | where
319 | E: de::Error,
320 | {
321 | Ok(FromStr::from_str(value).unwrap())
322 | }
323 |
324 | fn visit_map(self, map: M) -> Result
325 | where
326 | M: MapAccess<'de>,
327 | {
328 | // `MapAccessDeserializer` is a wrapper that turns a `MapAccess`
329 | // into a `Deserializer`, allowing it to be used as the input to T's
330 | // `Deserialize` implementation. T then deserializes itself using
331 | // the entries from the map visitor.
332 | Deserialize::deserialize(de::value::MapAccessDeserializer::new(map))
333 | }
334 | }
335 |
336 | deserializer.deserialize_any(StringOrStruct(PhantomData))
337 | }
338 |
339 | /// Deserializes an Option by passing `null` along to T's deserializer instead
340 | /// of treating it as a missing field
341 | fn optional_field<'de, T, D>(deserializer: D) -> Result