├── infra
│   ├── .gitignore
│   ├── Pulumi.yaml
│   ├── package.json
│   ├── Pulumi.prod.yaml
│   ├── tsconfig.json
│   └── index.ts
├── assets
│   ├── .prettierrc
│   ├── .babelrc
│   ├── static
│   │   ├── favicon.ico
│   │   ├── images
│   │   │   └── phoenix.png
│   │   └── robots.txt
│   ├── postcss.config.js
│   ├── js
│   │   ├── hooks.js
│   │   ├── charts
│   │   │   ├── runs
│   │   │   │   ├── last-runs.js
│   │   │   │   └── config.js
│   │   │   ├── utils.js
│   │   │   └── parts
│   │   │       ├── parts.js
│   │   │       └── config.js
│   │   └── app.js
│   ├── css
│   │   └── app.css
│   ├── .eslintrc.js
│   ├── tailwind.config.js
│   ├── stylelint.config.js
│   ├── webpack.config.js
│   └── package.json
├── test
│   ├── test_helper.exs
│   ├── support
│   │   ├── factory.ex
│   │   ├── test_fetchers
│   │   │   ├── workflows_fetcher.ex
│   │   │   ├── errored_fetcher.ex
│   │   │   ├── workflow_run_jobs_fetcher.ex
│   │   │   └── workflow_runs_fetcher.ex
│   │   ├── factories
│   │   │   ├── repository_factory.ex
│   │   │   ├── workflow_factory.ex
│   │   │   ├── workflow_run_job_factory.ex
│   │   │   └── workflow_run_factory.ex
│   │   ├── channel_case.ex
│   │   ├── conn_case.ex
│   │   └── data_case.ex
│   ├── dashy_web
│   │   ├── views
│   │   │   ├── layout_view_test.exs
│   │   │   └── error_view_test.exs
│   │   └── live
│   │       └── page_live_test.exs
│   └── dashy
│       ├── workflows_test.exs
│       ├── workflow_runs_test.exs
│       ├── workflow_run_jobs_test.exs
│       ├── charts
│       │   └── workflow_runs_test.exs
│       ├── fetcher_test.exs
│       └── repositories_test.exs
├── priv
│   ├── repo
│   │   ├── migrations
│   │   │   ├── .formatter.exs
│   │   │   ├── 20210518090111_add_repository_id_to_workflows.exs
│   │   │   ├── 20210518085818_add_started_completed_to_workflow_runs.exs
│   │   │   ├── 20210517135803_create_repositories.exs
│   │   │   ├── 20210513071547_create_workflows.exs
│   │   │   ├── 20210514103321_add_head_sha_to_workflow_runs.exs
│   │   │   ├── 20210513123511_create_workflow_runs.exs
│   │   │   ├── 20210517102625_add_head_sha_to_jobs.exs
│   │   │   └── 20210514152214_create_workflow_run_jobs.exs
│   │   ├── seeds.exs
│   │   └── dev_seeds.exs
│   └── gettext
│       ├── en
│       │   └── LC_MESSAGES
│       │       └── errors.po
│       └── errors.pot
├── lib
│   ├── dashy_web
│   │   ├── views
│   │   │   ├── layout_view.ex
│   │   │   ├── error_view.ex
│   │   │   └── error_helpers.ex
│   │   ├── templates
│   │   │   └── layout
│   │   │       ├── app.html.eex
│   │   │       ├── live.html.leex
│   │   │       └── root.html.leex
│   │   ├── components
│   │   │   ├── charts
│   │   │   │   ├── last_runs_component.ex
│   │   │   │   └── parts_component.ex
│   │   │   ├── card
│   │   │   │   ├── card_content_component.ex
│   │   │   │   ├── card_component.ex
│   │   │   │   └── card_title_component.ex
│   │   │   ├── modal
│   │   │   │   └── modal_component.ex
│   │   │   ├── button
│   │   │   │   └── button_component.ex
│   │   │   └── layout
│   │   │       └── layout_component.ex
│   │   ├── gettext.ex
│   │   ├── channels
│   │   │   └── user_socket.ex
│   │   ├── live
│   │   │   ├── page_live.ex
│   │   │   ├── colors.html
│   │   │   ├── repo
│   │   │   │   └── repo_live.ex
│   │   │   ├── ui_live.ex
│   │   │   └── graphs.html
│   │   ├── router.ex
│   │   ├── endpoint.ex
│   │   └── telemetry.ex
│   ├── dashy
│   │   ├── repo.ex
│   │   ├── charts
│   │   │   ├── part.ex
│   │   │   ├── run.ex
│   │   │   ├── helpers.ex
│   │   │   ├── workflow_runs_fake.ex
│   │   │   ├── workflow_runs.ex
│   │   │   ├── workflow_parts.ex
│   │   │   └── workflow_parts_fake.ex
│   │   ├── schema.ex
│   │   ├── workflows.ex
│   │   ├── github_client.ex
│   │   ├── workflow_run_jobs.ex
│   │   ├── fetchers
│   │   │   ├── behaviours.ex
│   │   │   ├── gen_server_fetcher.ex
│   │   │   ├── workflows_fetcher.ex
│   │   │   ├── workflow_run_jobs_fetcher.ex
│   │   │   └── workflow_runs_fetcher.ex
│   │   ├── repositories
│   │   │   └── repository.ex
│   │   ├── workflows
│   │   │   └── workflow.ex
│   │   ├── release.ex
│   │   ├── workflow_runs.ex
│   │   ├── application.ex
│   │   ├── workflow_run_jobs
│   │   │   └── workflow_run_job.ex
│   │   ├── workflow_runs
│   │   │   └── workflow_run.ex
│   │   ├── repositories.ex
│   │   └── fetcher.ex
│   ├── dashy.ex
│   └── dashy_web.ex
├── phoenix_static_buildpack.config
├── .devcontainer
│   ├── setup-container.sh
│   ├── setup-dev.sh
│   ├── docker-compose.yml
│   ├── devcontainer.json
│   └── Dockerfile
├── .formatter.exs
├── elixir_buildpack.config
├── config
│   ├── dev.local.exs.example
│   ├── test.exs
│   ├── test.local.exs.example
│   ├── config.exs
│   ├── prod.secret.exs
│   ├── releases.exs
│   ├── prod.exs
│   └── dev.exs
├── rename.sh
├── .vscode
│   ├── extensions.json
│   └── settings.json
├── .github
│   ├── dependabot.yml
│   └── workflows
│       ├── deploy.yml
│       ├── docs.yml
│       └── ci.yml
├── README.md
├── app.json
├── .gitignore
├── Dockerfile
├── mix.exs
├── .credo.exs
└── mix.lock
/infra/.gitignore:
--------------------------------------------------------------------------------
1 | /bin/
2 | /node_modules/
3 | 
/infra/.gitignore:
--------------------------------------------------------------------------------
1 | /bin/
2 | /node_modules/
3 |
--------------------------------------------------------------------------------
/assets/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "semi": false,
3 | "singleQuote": false
4 | }
5 |
--------------------------------------------------------------------------------
/assets/.babelrc:
--------------------------------------------------------------------------------
1 | {
2 | "presets": [
3 | "@babel/preset-env"
4 | ]
5 | }
6 |
--------------------------------------------------------------------------------
/test/test_helper.exs:
--------------------------------------------------------------------------------
1 | ExUnit.start()
2 | Ecto.Adapters.SQL.Sandbox.mode(Dashy.Repo, :manual)
3 |
--------------------------------------------------------------------------------
/assets/static/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codegram/dashy/main/assets/static/favicon.ico
--------------------------------------------------------------------------------
/priv/repo/migrations/.formatter.exs:
--------------------------------------------------------------------------------
1 | [
2 | import_deps: [:ecto_sql],
3 | inputs: ["*.exs"]
4 | ]
5 |
--------------------------------------------------------------------------------
/lib/dashy_web/views/layout_view.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.LayoutView do
2 | use DashyWeb, :view
3 | end
4 |
--------------------------------------------------------------------------------
/assets/static/images/phoenix.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/codegram/dashy/main/assets/static/images/phoenix.png
--------------------------------------------------------------------------------
/infra/Pulumi.yaml:
--------------------------------------------------------------------------------
1 | name: dashy
2 | runtime: nodejs
3 | description: A minimal Google Cloud TypeScript Pulumi program
4 |
--------------------------------------------------------------------------------
/phoenix_static_buildpack.config:
--------------------------------------------------------------------------------
1 | # Node.js version
2 | node_version=15.10.0
3 | compile="compile"
4 | assets_path=assets
5 | phoenix_ex=phx
--------------------------------------------------------------------------------
/lib/dashy/repo.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo do
2 | use Ecto.Repo,
3 | otp_app: :dashy,
4 | adapter: Ecto.Adapters.Postgres
5 | end
6 |
--------------------------------------------------------------------------------
/lib/dashy/charts/part.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.Part do
2 | @derive Jason.Encoder
3 | defstruct [:name, :time, :seconds, :minutes, :link]
4 | end
5 |
--------------------------------------------------------------------------------
/lib/dashy/charts/run.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.Run do
2 | @derive Jason.Encoder
3 | defstruct [:time, :seconds, :minutes, :link, :status]
4 | end
5 |
--------------------------------------------------------------------------------
/.devcontainer/setup-container.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | sh -c "$(wget -O- https://github.com/deluan/zsh-in-docker/releases/download/v1.0.2/zsh-in-docker.sh)"
--------------------------------------------------------------------------------
/.devcontainer/setup-dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | cp config/dev.local.exs.example config/dev.local.exs
4 | cp config/test.local.exs.example config/test.local.exs
5 |
6 | mix compile
7 | mix setup
--------------------------------------------------------------------------------
/.formatter.exs:
--------------------------------------------------------------------------------
1 | [
2 | import_deps: [:ecto, :phoenix, :surface],
3 | inputs: ["*.{ex,exs}", "priv/*/seeds.exs", "{config,lib,test}/**/*.{ex,exs}"],
4 | subdirectories: ["priv/*/migrations"]
5 | ]
6 |
--------------------------------------------------------------------------------
/lib/dashy/schema.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Schema do
2 | defmacro __using__(_) do
3 | quote do
4 | use Ecto.Schema
5 | @timestamps_opts [type: :utc_datetime_usec]
6 | end
7 | end
8 | end
9 |
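10 | # Usage (as in Dashy.Workflows.Workflow): write `use Dashy.Schema` in place
11 | # of `use Ecto.Schema` so the schema gets microsecond UTC timestamps.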
--------------------------------------------------------------------------------
/elixir_buildpack.config:
--------------------------------------------------------------------------------
1 | # Elixir version
2 | elixir_version=1.11.3
3 |
4 | # Erlang version
5 | # available versions https://github.com/HashNuke/heroku-buildpack-elixir-otp-builds/blob/master/otp-versions
6 | erlang_version=23.2.5
7 |
--------------------------------------------------------------------------------
/assets/static/robots.txt:
--------------------------------------------------------------------------------
1 | # See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
2 | #
3 | # To ban all spiders from the entire site uncomment the next two lines:
4 | # User-agent: *
5 | # Disallow: /
6 |
--------------------------------------------------------------------------------
/assets/postcss.config.js:
--------------------------------------------------------------------------------
1 | // postcss.config.js
2 | module.exports = {
3 | plugins: [
4 | require("postcss-import"),
5 | require("tailwindcss"),
6 | require("postcss-nested"),
7 | require("autoprefixer"),
8 | ],
9 | }
10 |
--------------------------------------------------------------------------------
/test/support/factory.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Factory do
2 | use ExMachina.Ecto, repo: Dashy.Repo
3 |
4 | use Dashy.RepositoryFactory
5 | use Dashy.WorkflowFactory
6 | use Dashy.WorkflowRunFactory
7 | use Dashy.WorkflowRunJobFactory
8 | end
9 |
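10 | # ExMachina exposes these factories in tests (see Dashy.WorkflowsTest), e.g.:
11 | #
12 | #   insert(:workflow)      # builds and persists a workflow plus its repository
13 | #   params_for(:workflow)  # returns the attrs as a plain map, nothing persisted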
--------------------------------------------------------------------------------
/lib/dashy_web/templates/layout/app.html.eex:
--------------------------------------------------------------------------------
1 |
2 | <%= get_flash(@conn, :info) %>
3 | <%= get_flash(@conn, :error) %>
4 | <%= @inner_content %>
5 |
6 |
--------------------------------------------------------------------------------
/assets/js/hooks.js:
--------------------------------------------------------------------------------
1 | import LastRunsHooks from "./charts/runs/last-runs.js"
2 | import PartsHooks from "./charts/parts/parts.js"
3 |
4 | const Hooks = {}
5 |
6 | Hooks.LastRunsChart = LastRunsHooks
7 | Hooks.PartsChart = PartsHooks
8 |
9 | export default Hooks
10 |
--------------------------------------------------------------------------------
/assets/css/app.css:
--------------------------------------------------------------------------------
1 | @import "tailwindcss/base";
2 | @import "tailwindcss/components";
3 | @import "tailwindcss/utilities";
4 | @import "../node_modules/nprogress/nprogress.css";
5 |
6 | /* Hides Phoenix's live reload iframe */
7 | iframe[hidden] {
8 | display: none;
9 | }
10 |
--------------------------------------------------------------------------------
/config/dev.local.exs.example:
--------------------------------------------------------------------------------
1 | use Mix.Config
2 |
3 | # Configure your database
4 | config :dashy, Dashy.Repo,
5 | username: "postgres",
6 | password: "postgres",
7 | database: "database",
8 | hostname: "db",
9 | show_sensitive_data_on_connection_error: true,
10 | pool_size: 10
11 |
--------------------------------------------------------------------------------
/lib/dashy.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy do
2 | @moduledoc """
3 | Dashy keeps the contexts that define your domain
4 | and business logic.
5 |
6 | Contexts are also responsible for managing your data, regardless
7 | if it comes from the database, an external API or others.
8 | """
9 | end
10 |
--------------------------------------------------------------------------------
/infra/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "dashy",
3 | "devDependencies": {
4 | "@types/node": "^10.0.0"
5 | },
6 | "dependencies": {
7 | "@codegram/pulumi-utils": "^1.0.0-beta.15",
8 | "@pulumi/gcp": "^5.0.0",
9 | "@pulumi/pulumi": "^3.0.0"
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/infra/Pulumi.prod.yaml:
--------------------------------------------------------------------------------
1 | config:
2 | dashy:clusterStackRef: codegram/genesis-cluster/prod
3 | dashy:githubToken:
4 | secure: AAABAN9zHMuBKorD6yAu8l9KnPH6XRTwml49tcUnZFmSyOTg3Z/hxdkk+e1+QNdUCopgvG5p1JDGN2fXgh45TS5iqYJL3aI9
5 | dashy:gcpProjectId: labs-260007
6 | gcp:project: labs-260007
7 | gcp:zone: europe-west3-a
8 |
--------------------------------------------------------------------------------
/lib/dashy_web/components/charts/last_runs_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.Charts.LastRuns do
2 | use Surface.Component
3 |
4 | def render(assigns) do
5 | ~H"""
6 |
7 |
8 |
9 | """
10 | end
11 | end
12 |
--------------------------------------------------------------------------------
/test/dashy_web/views/layout_view_test.exs:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.LayoutViewTest do
2 | use DashyWeb.ConnCase, async: true
3 |
4 | # When testing helpers, you may want to import Phoenix.HTML and
5 | # use functions such as safe_to_string() to convert the helper
6 | # result into an HTML string.
7 | # import Phoenix.HTML
8 | end
9 |
--------------------------------------------------------------------------------
/priv/repo/migrations/20210518090111_add_repository_id_to_workflows.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.AddRepositoryIdToWorkflows do
2 | use Ecto.Migration
3 |
4 | def change do
5 | alter table("workflows") do
6 | add :repository_id, references(:repositories, on_delete: :delete_all), null: false
7 | end
8 | end
9 | end
10 |
--------------------------------------------------------------------------------
/config/test.exs:
--------------------------------------------------------------------------------
1 | use Mix.Config
2 |
3 | # We don't run a server during test. If one is required,
4 | # you can enable the server option below.
5 | config :dashy, DashyWeb.Endpoint,
6 | http: [port: 4002],
7 | server: false
8 |
9 | # Print only warnings and errors during test
10 | config :logger, level: :warn
11 |
12 | import_config "test.local.exs"
13 |
--------------------------------------------------------------------------------
/lib/dashy_web/components/card/card_content_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.CardContent do
2 | use Surface.Component
3 |
4 | @doc "The content of the Card"
5 | slot default, required: true
6 |
7 | def render(assigns) do
8 | ~H"""
9 |
10 |
11 |
12 | """
13 | end
14 | end
15 |
--------------------------------------------------------------------------------
/rename.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | find . -type f -name '.gitignore' -exec sed -i 's/phoenix_starter/dashy/g' {} \;
4 | find . -type f -regex '.*\.\(eex\|ex\|exs\|leex\|md\|json\|yml\)' -exec sed -i 's/phoenix_starter/dashy/g' {} \;
5 | find . -type f -regex '.*\.\(eex\|ex\|exs\|leex\|md\|json\|yml\)' -exec sed -i 's/PhoenixStarter/Dashy/g' {} \;
6 | rename 's/phoenix_starter/dashy/' **/*
--------------------------------------------------------------------------------
/priv/repo/migrations/20210518085818_add_started_completed_to_workflow_runs.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.AddStartedCompletedToWorkflowRuns do
2 | use Ecto.Migration
3 |
4 | def change do
5 | alter table("workflow_runs") do
6 | add :started_at, :utc_datetime, null: true
7 | add :completed_at, :utc_datetime, null: true
8 | end
9 | end
10 | end
11 |
--------------------------------------------------------------------------------
/priv/repo/seeds.exs:
--------------------------------------------------------------------------------
1 | # Script for populating the database. You can run it as:
2 | #
3 | # mix run priv/repo/seeds.exs
4 | #
5 | # Inside the script, you can read and write to any of your
6 | # repositories directly:
7 | #
8 | # Dashy.Repo.insert!(%Dashy.SomeSchema{})
9 | #
10 | # We recommend using the bang functions (`insert!`, `update!`
11 | # and so on) as they will fail if something goes wrong.
12 |
--------------------------------------------------------------------------------
/assets/js/charts/runs/last-runs.js:
--------------------------------------------------------------------------------
1 | import Chart from "chart.js/auto"
2 | import { config, buildLabels } from "./config"
3 |
4 | const LastRunsHooks = {
5 | mounted() {
6 | var chart = new Chart(this.el, config)
7 |
8 | this.handleEvent("load-runs", ({ data }) => {
9 | chart.data = buildLabels(data)
10 | chart.update()
11 | })
12 | },
13 | }
14 |
15 | export default LastRunsHooks
16 |
--------------------------------------------------------------------------------
/test/dashy_web/live/page_live_test.exs:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.PageLiveTest do
2 | use DashyWeb.ConnCase
3 |
4 | import Phoenix.LiveViewTest
5 |
6 | test "disconnected and connected render", %{conn: conn} do
7 | {:ok, page_live, disconnected_html} = live(conn, "/")
8 | assert disconnected_html =~ "Repos"
9 | assert render(page_live) =~ "Here you can see some awesome repos"
10 | end
11 | end
12 |
--------------------------------------------------------------------------------
/test/support/test_fetchers/workflows_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.TestFetchers.WorkflowsFetcher do
2 | @behaviour GitHubWorkflowsFetcher
3 |
4 | import Dashy.Factory
5 |
6 | @impl GitHubWorkflowsFetcher
7 | def get(_repo) do
8 | workflows = [
9 | params_for(:workflow),
10 | params_for(:workflow),
11 | params_for(:workflow)
12 | ]
13 |
14 | %{body: workflows}
15 | end
16 | end
17 |
--------------------------------------------------------------------------------
/lib/dashy_web/components/charts/parts_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.Charts.Parts do
2 | use Surface.Component
3 |
4 | @doc "The list of the card"
5 | prop list, :string, required: false
6 |
7 | def render(assigns) do
8 | ~H"""
9 |
10 |
11 |
12 | """
13 | end
14 | end
15 |
--------------------------------------------------------------------------------
/lib/dashy_web/templates/layout/live.html.leex:
--------------------------------------------------------------------------------
1 |
2 | <%= live_flash(@flash, :info) %>
5 |
6 | <%= live_flash(@flash, :error) %>
9 |
10 | <%= @inner_content %>
11 |
12 |
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "JakeBecker.elixir-ls",
4 | "esbenp.prettier-vscode",
5 | "dbaeumer.vscode-eslint",
6 | "bradlc.vscode-tailwindcss",
7 | "adrianwilczynski.alpine-js-intellisense",
8 | "stylelint.vscode-stylelint",
9 | "ms-azuretools.vscode-docker",
10 | "pantajoe.vscode-elixir-credo",
11 | "stkb.rewrap",
12 | "samuel-pordeus.elixir-test"
13 | ]
14 | }
15 |
--------------------------------------------------------------------------------
/lib/dashy/workflows.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Workflows do
2 | alias Dashy.Repo
3 | alias Dashy.Workflows.Workflow
4 |
5 | def get_by_external_id(id), do: Repo.get_by(Workflow, external_id: id)
6 |
7 | def create_or_update(attrs) do
8 | case get_by_external_id(attrs.external_id) do
9 | nil -> %Workflow{}
10 | workflow -> workflow
11 | end
12 | |> Workflow.changeset(attrs)
13 | |> Repo.insert_or_update()
14 | end
15 | end
16 |
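17 | # Upsert sketch (attrs are illustrative): a second call with the same
18 | # external_id updates the existing row instead of inserting a new one.
19 | #
20 | #   Dashy.Workflows.create_or_update(%{
21 | #     external_id: 42,
22 | #     name: "CI",
23 | #     path: ".github/workflows/ci.yml",
24 | #     node_id: "node-42",
25 | #     state: "active",
26 | #     repository_id: repo.id
27 | #   })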
--------------------------------------------------------------------------------
/test/support/test_fetchers/errored_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.TestFetchers.ErroredFetcher do
2 | @behaviour GitHubWorkflowsFetcher
3 | @behaviour GitHubWorkflowRunsFetcher
4 |
5 | @impl GitHubWorkflowsFetcher
6 | def get(repo) do
7 | {:error, "whoops in #{repo}"}
8 | end
9 |
10 | @impl GitHubWorkflowRunsFetcher
11 | def get(repo, branch, page) do
12 | {:error, "whoops in #{page} of #{repo}, branch #{branch}"}
13 | end
14 | end
15 |
--------------------------------------------------------------------------------
/lib/dashy/charts/helpers.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.Helpers do
2 | def generate_colors(total) do
3 | 0..(total - 1)
4 | |> Enum.map(fn index ->
5 | %{
6 | h: "#{360 / total * index}",
7 | s: "#{50 + 25 * rem(index, 3)}%",
8 | l: "#{75 - 25 * rem(index, 3)}%"
9 | }
10 | end)
11 | end
12 |
13 | def build_style_color(%{h: h, s: s, l: l}, a \\ "100%") do
14 | "hsla(#{h}, #{s}, #{l}, #{a})"
15 | end
16 | end
17 |
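18 | # Worked example of what the formulas above produce (total = 2):
19 | #
20 | #   generate_colors(2)
21 | #   #=> [%{h: "0.0", s: "50%", l: "75%"}, %{h: "180.0", s: "75%", l: "50%"}]
22 | #
23 | #   build_style_color(%{h: "0.0", s: "50%", l: "75%"})
24 | #   #=> "hsla(0.0, 50%, 75%, 100%)"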
--------------------------------------------------------------------------------
/priv/repo/migrations/20210517135803_create_repositories.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.CreateRepositories do
2 | use Ecto.Migration
3 |
4 | def change do
5 | create table(:repositories) do
6 | add :name, :string, null: false
7 | add :user, :string, null: false
8 | add :branch, :string, null: false
9 |
10 | timestamps()
11 | end
12 |
13 | create unique_index(:repositories, [:user, :name], name: :unique_index_repo)
14 | end
15 | end
16 |
--------------------------------------------------------------------------------
/test/support/test_fetchers/workflow_run_jobs_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.TestFetchers.WorkflowRunJobsFetcher do
2 | @behaviour GitHubWorkflowRunJobsFetcher
3 |
4 | import Dashy.Factory
5 |
6 | @impl GitHubWorkflowRunJobsFetcher
7 | def get(_repo, id) do
8 | workflow_run_jobs = [
9 | params_for(:workflow_run_job, workflow_run_id: id),
10 | params_for(:workflow_run_job, workflow_run_id: id)
11 | ]
12 |
13 | %{body: workflow_run_jobs}
14 | end
15 | end
16 |
--------------------------------------------------------------------------------
/lib/dashy/github_client.ex:
--------------------------------------------------------------------------------
1 | defmodule GitHubClient do
2 |   def get(url) do
3 |     headers = [
4 |       Authorization: "token #{Application.get_env(:dashy, Dashy.Fetcher)[:token]}"
5 |     ]
6 | 
7 |     options = [recv_timeout: 30_000]
8 | 
9 |     case HTTPoison.get(url, headers, options) do
10 |       {:ok, %{status_code: 200} = response} -> {:ok, response}
11 |       # Treat any non-200 response (404, 403 rate limiting, etc.) as an error
12 |       # instead of crashing with a CaseClauseError.
13 |       {:ok, response} -> {:error, response}
14 |       {:error, _} = err -> err
15 |     end
16 |   end
17 | end
18 | 
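19 | # Usage sketch (the URL is illustrative):
20 | #
21 | #   {:ok, %HTTPoison.Response{body: body}} =
22 | #     GitHubClient.get("https://api.github.com/repos/codegram/dashy/actions/workflows")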
--------------------------------------------------------------------------------
/assets/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | root: true,
3 | env: {
4 | browser: true,
5 | node: true,
6 | es6: true,
7 | },
8 | extends: ["eslint:recommended", "prettier", "plugin:prettier/recommended"],
9 | plugins: ["prettier"],
10 | rules: {
11 | "no-unused-vars": [
12 | "error",
13 | {
14 | argsIgnorePattern: "^_",
15 | },
16 | ],
17 | "prefer-const": ["error"],
18 | "prettier/prettier": "error",
19 | },
20 | parser: "babel-eslint",
21 | }
22 |
--------------------------------------------------------------------------------
/lib/dashy/workflow_run_jobs.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRunJobs do
2 | alias Dashy.Repo
3 | alias Dashy.WorkflowRunJobs.WorkflowRunJob
4 |
5 | def get_by_external_id(id), do: Repo.get_by(WorkflowRunJob, external_id: id)
6 |
7 | def create_or_update(attrs) do
8 | case get_by_external_id(attrs.external_id) do
9 | nil -> %WorkflowRunJob{}
10 | workflow_run_job -> workflow_run_job
11 | end
12 | |> WorkflowRunJob.changeset(attrs)
13 | |> Repo.insert_or_update()
14 | end
15 | end
16 |
--------------------------------------------------------------------------------
/test/dashy_web/views/error_view_test.exs:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.ErrorViewTest do
2 | use DashyWeb.ConnCase, async: true
3 |
4 | # Bring render/3 and render_to_string/3 for testing custom views
5 | import Phoenix.View
6 |
7 | test "renders 404.html" do
8 | assert render_to_string(DashyWeb.ErrorView, "404.html", []) == "Not Found"
9 | end
10 |
11 | test "renders 500.html" do
12 | assert render_to_string(DashyWeb.ErrorView, "500.html", []) ==
13 | "Internal Server Error"
14 | end
15 | end
16 |
--------------------------------------------------------------------------------
/infra/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "strict": true,
4 | "outDir": "bin",
5 | "target": "es2016",
6 | "module": "commonjs",
7 | "moduleResolution": "node",
8 | "sourceMap": true,
9 | "experimentalDecorators": true,
10 | "pretty": true,
11 | "noFallthroughCasesInSwitch": true,
12 | "noImplicitReturns": true,
13 | "forceConsistentCasingInFileNames": true
14 | },
15 | "files": [
16 | "index.ts"
17 | ]
18 | }
19 |
--------------------------------------------------------------------------------
/lib/dashy_web/components/card/card_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.Card do
2 | use Surface.Component
3 |
4 | @doc "The content of the Card"
5 | slot default, required: true
6 |
7 | @doc "The title of the card"
8 | prop title, :string, required: false
9 |
10 | def render(assigns) do
11 | ~H"""
12 |
17 | """
18 | end
19 | end
20 |
--------------------------------------------------------------------------------
/test/support/factories/repository_factory.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.RepositoryFactory do
2 | @moduledoc """
3 | This module contains the repository factories.
4 | """
5 |
6 | use ExMachina.Ecto, repo: Dashy.Repo
7 |
8 | defmacro __using__(_opts) do
9 | quote do
10 | def repository_factory do
11 | %Dashy.Repositories.Repository{
12 | user: sequence(:user, &"user-#{&1}"),
13 | name: "repo",
14 | branch: "branch"
15 | }
16 | end
17 | end
18 | end
19 | end
20 |
--------------------------------------------------------------------------------
/config/test.local.exs.example:
--------------------------------------------------------------------------------
1 | use Mix.Config
2 |
3 | database_host = System.get_env("DATABASE_HOST") || "db"
4 |
5 | # Configure your database
6 | #
7 | # The MIX_TEST_PARTITION environment variable can be used
8 | # to provide built-in test partitioning in CI environment.
9 | # Run `mix help test` for more information.
10 | config :dashy, Dashy.Repo,
11 | username: "postgres",
12 | password: "postgres",
13 | database: "database_test#{System.get_env("MIX_TEST_PARTITION")}",
14 | hostname: database_host,
15 | pool: Ecto.Adapters.SQL.Sandbox
16 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "github-actions"
4 | directory: "/"
5 | schedule:
6 | interval: "weekly"
7 | time: "07:00"
8 | timezone: "Europe/Berlin"
9 | - package-ecosystem: mix
10 | directory: "/"
11 | schedule:
12 | interval: "weekly"
13 | time: "07:00"
14 | timezone: "Europe/Berlin"
15 | - package-ecosystem: "npm"
16 | directory: "/assets"
17 | schedule:
18 | interval: "weekly"
19 | time: "07:00"
20 | timezone: "Europe/Berlin"
21 |
--------------------------------------------------------------------------------
/test/support/test_fetchers/workflow_runs_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.TestFetchers.WorkflowRunsFetcher do
2 | @behaviour GitHubWorkflowRunsFetcher
3 |
4 | import Dashy.Factory
5 |
6 | @impl GitHubWorkflowRunsFetcher
7 | def get(_repo, _branch, 1) do
8 | workflow_runs = [
9 | params_for(:workflow_run_with_workflow_id),
10 | params_for(:workflow_run_with_workflow_id)
11 | ]
12 |
13 | %{body: workflow_runs}
14 | end
15 |
16 | @impl GitHubWorkflowRunsFetcher
17 | def get(_repo, _branch, _page), do: %{body: []}
18 | end
19 |
--------------------------------------------------------------------------------
/priv/repo/migrations/20210513071547_create_workflows.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.CreateWorkflows do
2 | use Ecto.Migration
3 |
4 | def change do
5 | create table(:workflows) do
6 | add :external_id, :bigint, null: false
7 | add :name, :string, null: false
8 | add :node_id, :string, null: false
9 | add :path, :string, null: false
10 | add :state, :string, null: false
11 |
12 | timestamps(inserted_at: :created_at)
13 | end
14 |
15 | create unique_index(:workflows, [:external_id])
16 | end
17 | end
18 |
--------------------------------------------------------------------------------
/lib/dashy/fetchers/behaviours.ex:
--------------------------------------------------------------------------------
1 | defmodule GitHubWorkflowsFetcher do
2 | @callback get(Dashy.Repositories.Repository.t()) ::
3 | %{body: any() | [any(), ...]} | {:error, any()}
4 | end
5 |
6 | defmodule GitHubWorkflowRunsFetcher do
7 | @callback get(String.t(), String.t(), integer()) ::
8 |               %{body: any() | [any(), ...]} | {:error, any()}
9 | end
10 |
11 | defmodule GitHubWorkflowRunJobsFetcher do
12 | @callback get(String.t(), integer()) ::
13 |               %{body: any() | [any(), ...]} | {:error, any()}
14 | end
15 |
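16 | # A conforming implementation declares the behaviour and tags its callback,
17 | # e.g. (sketch mirroring Dashy.TestFetchers.ErroredFetcher):
18 | #
19 | #   defmodule MyFetcher do
20 | #     @behaviour GitHubWorkflowsFetcher
21 | #
22 | #     @impl GitHubWorkflowsFetcher
23 | #     def get(repo), do: {:error, "whoops in #{repo}"}
24 | #   end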
--------------------------------------------------------------------------------
/.devcontainer/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: "3"
2 |
3 | services:
4 | phoenix:
5 | build:
6 | context: .
7 | dockerfile: Dockerfile
8 | depends_on:
9 | - db
10 | command: /bin/sh -c "while sleep 1000; do :; done"
11 | db:
12 | image: postgres:13.1
13 | environment:
14 | POSTGRES_USER: postgres
15 | POSTGRES_PASSWORD: postgres
16 | POSTGRES_DB: database
17 | PGDATA: /var/lib/postgresql/data/pgdata
18 | restart: always
19 | volumes:
20 | - db:/var/lib/postgresql/data
21 |
22 | volumes:
23 | db:
24 |
--------------------------------------------------------------------------------
/assets/js/charts/utils.js:
--------------------------------------------------------------------------------
1 | function secondsToHms(d) {
2 |   d = Number(d)
3 |   var h = Math.floor(d / 3600)
4 |   var m = Math.floor((d % 3600) / 60)
5 |   var s = Math.floor(d % 60)
6 |   var hDisplay = h > 0 ? h + (h == 1 ? " hour" : " hours") : ""
7 |   var mDisplay = m > 0 ? m + (m == 1 ? " minute" : " minutes") : ""
8 |   var sDisplay = s > 0 ? s + (s == 1 ? " second" : " seconds") : ""
9 |   return [hDisplay, mDisplay, sDisplay].filter(Boolean).join(", ")
10 | }
11 |
12 | function toTime(ctx) {
13 | return secondsToHms(ctx.parsed.y * 60)
14 | }
15 |
16 | export default toTime
17 |
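18 | // Example: toTime({ parsed: { y: 3.5 } }) === "3 minutes, 30 seconds"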
--------------------------------------------------------------------------------
/lib/dashy_web/views/error_view.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.ErrorView do
2 | use DashyWeb, :view
3 |
4 | # If you want to customize a particular status code
5 | # for a certain format, you may uncomment below.
6 | # def render("500.html", _assigns) do
7 | # "Internal Server Error"
8 | # end
9 |
10 | # By default, Phoenix returns the status message from
11 | # the template name. For example, "404.html" becomes
12 | # "Not Found".
13 | def template_not_found(template, _assigns) do
14 | Phoenix.Controller.status_message_from_template(template)
15 | end
16 | end
17 |
--------------------------------------------------------------------------------
/priv/repo/migrations/20210514103321_add_head_sha_to_workflow_runs.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.AddHeadShaToWorkflowRuns do
2 | use Ecto.Migration
3 |
4 | def up do
5 | alter table("workflow_runs") do
6 | add :head_sha, :string
7 | end
8 |
9 | execute """
10 |     UPDATE workflow_runs SET head_sha = metadata->>'head_sha';
11 | """
12 |
13 | alter table(:workflow_runs) do
14 | modify :head_sha, :string, null: false
15 | end
16 | end
17 |
18 | def down do
19 | alter table(:workflow_runs) do
20 | remove :head_sha, :string, null: false
21 | end
22 | end
23 | end
24 |
--------------------------------------------------------------------------------
/lib/dashy/fetchers/gen_server_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Fetchers.GenServerFetcher do
2 | use GenServer
3 |
4 | alias Dashy.Fetcher
5 |
6 | def start_link(options) do
7 | GenServer.start_link(__MODULE__, %{}, options)
8 | end
9 |
10 | def fetch(pid, repo) do
11 | GenServer.cast(pid, {:fetch, repo})
12 | end
13 |
14 | @impl true
15 | def init(_) do
16 | {:ok, %{}}
17 | end
18 |
19 | @impl true
20 | def handle_cast({:fetch, repo}, _) do
21 | Fetcher.update_workflows(repo)
22 | Fetcher.update_workflow_runs(repo)
23 | Fetcher.update_all_workflow_run_jobs(repo)
24 | {:noreply, repo}
25 | end
26 | end
27 |
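28 | # Usage sketch (the name and repo are illustrative):
29 | #
30 | #   {:ok, pid} = Dashy.Fetchers.GenServerFetcher.start_link(name: Dashy.Fetchers.GenServerFetcher)
31 | #   Dashy.Fetchers.GenServerFetcher.fetch(pid, repo)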
--------------------------------------------------------------------------------
/lib/dashy/repositories/repository.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repositories.Repository do
2 | use Ecto.Schema
3 | import Ecto.Changeset
4 |
5 | alias Dashy.Workflows.Workflow
6 |
7 | schema "repositories" do
8 | field :name, :string
9 | field :user, :string
10 | field :branch, :string
11 |
12 | has_many :workflows, Workflow
13 |
14 | timestamps()
15 | end
16 |
17 | @doc false
18 | def changeset(repository, attrs) do
19 | repository
20 | |> cast(attrs, [:name, :user, :branch])
21 | |> validate_required([:name, :user, :branch])
22 | |> unique_constraint(:unique_repo, name: :unique_index_repo)
23 | end
24 | end
25 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Dashy
2 |
3 | [](https://heroku.com/deploy?template=https://github.com/codegram/dashy)
4 |
5 | [](https://github.com/codegram/dashy/actions/workflows/ci.yml) [](https://codegram.github.io/dashy)
6 |
7 | ## Adding real data
8 |
9 | 1. Get yourself a Personal Access Token from your GitHub account.
10 | 2. Start your local console using `GITHUB_TOKEN=<your token> iex -S mix`
11 | 3. Run the dev seeds:
12 |
13 | ```
14 | mix run priv/repo/dev_seeds.exs
15 | ```
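16 | 
17 | The seeds boil down to the calls below; a minimal sketch of doing the same
18 | from `iex` (the repository attributes are illustrative):
19 | 
20 | ```elixir
21 | {:ok, repo} =
22 |   Dashy.Repositories.create_repository(%{user: "decidim", name: "decidim", branch: "develop"})
23 | 
24 | Dashy.Fetcher.update_workflows(repo)
25 | Dashy.Fetcher.update_workflow_runs(repo)
26 | Dashy.Fetcher.update_all_workflow_run_jobs(repo)
27 | ```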
--------------------------------------------------------------------------------
/.github/workflows/deploy.yml:
--------------------------------------------------------------------------------
1 |
2 | name: Build & Deploy
3 | on:
4 | push:
5 | branches:
6 | - "deploy"
7 | - "main"
8 | jobs:
9 | build_and_deploy:
10 | name: Deploy infra
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v2
14 | with:
15 | fetch-depth: 1
16 | - uses: satackey/action-docker-layer-caching@v0.0.11
17 | - uses: docker://pulumi/actions
18 | with:
19 | args: up --yes
20 | env:
21 | GOOGLE_CREDENTIALS: ${{ secrets.GOOGLE_CREDENTIALS }}
22 | PULUMI_ACCESS_TOKEN: ${{ secrets.PULUMI_ACCESS_TOKEN }}
23 | PULUMI_CI: up
24 | PULUMI_ROOT: infra
--------------------------------------------------------------------------------
/test/support/factories/workflow_factory.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowFactory do
2 | @moduledoc """
3 | This module contains the workflow factories.
4 | """
5 |
6 | use ExMachina.Ecto, repo: Dashy.Repo
7 |
8 | defmacro __using__(_opts) do
9 | quote do
10 | def workflow_factory do
11 | %Dashy.Workflows.Workflow{
12 | repository: build(:repository),
13 | name: "My workflow",
14 | path: ".github/workflows/my_workflow.yml",
15 | state: "active",
16 | node_id: sequence(:node_id, &"node-id-#{&1}"),
17 | external_id: sequence(:workflow_external_id, fn id -> id end)
18 | }
19 | end
20 | end
21 | end
22 | end
23 |
--------------------------------------------------------------------------------
/priv/repo/migrations/20210513123511_create_workflow_runs.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.CreateWorkflowRuns do
2 | use Ecto.Migration
3 |
4 | def change do
5 | create table("workflow_runs") do
6 | add :external_id, :bigint, null: false
7 | add :name, :string, null: false
8 | add :node_id, :string, null: false
9 | add :conclusion, :string, null: false
10 | add :status, :string, null: false
11 |       add :workflow_id, references(:workflows, column: :external_id, type: :bigint), null: false
12 | add :metadata, :jsonb
13 |
14 | timestamps(inserted_at: :created_at)
15 | end
16 |
17 | create unique_index(:workflow_runs, [:external_id])
18 | end
19 | end
20 |
--------------------------------------------------------------------------------
/lib/dashy_web/templates/layout/root.html.leex:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html lang="en">
3 |   <head>
4 |     <meta charset="utf-8"/>
5 |     <meta http-equiv="X-UA-Compatible" content="IE=edge"/>
6 |     <meta name="viewport" content="width=device-width, initial-scale=1.0"/>
7 |     <%= csrf_meta_tag() %>
8 |     <%= live_title_tag assigns[:page_title] || "Dashy", suffix: " · Phoenix Framework" %>
9 |     <link phx-track-static rel="stylesheet" href="<%= Routes.static_path(@conn, "/css/app.css") %>"/>
10 |     <script defer phx-track-static type="text/javascript" src="<%= Routes.static_path(@conn, "/js/app.js") %>"></script>
11 |   </head>
12 |   <body>
13 |     <%= @inner_content %>
14 |   </body>
15 | </html>
16 | 
--------------------------------------------------------------------------------
/priv/repo/migrations/20210517102625_add_head_sha_to_jobs.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.AddHeadShaToJobs do
2 | use Ecto.Migration
3 |
4 | def up do
5 | alter table("workflow_run_jobs") do
6 | add :head_sha, :string
7 | end
8 |
9 | execute """
10 |     UPDATE workflow_run_jobs SET head_sha = (metadata->'head_sha')::text;
11 | """
12 |
13 | execute """
14 | UPDATE workflow_run_jobs SET head_sha = REPLACE(head_sha, '"', '');
15 | """
16 |
17 | alter table(:workflow_run_jobs) do
18 | modify :head_sha, :string, null: false
19 | end
20 | end
21 |
22 | def down do
23 | alter table(:workflow_run_jobs) do
24 | remove :head_sha, :string, null: false
25 | end
26 | end
27 | end
28 |
--------------------------------------------------------------------------------
/app.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Dashy",
3 | "description": "Dashy application",
4 | "keywords": [],
5 | "success_url": "/",
6 | "scripts": {
7 | "postdeploy": "mix do ecto.migrate, run priv/repo/seeds.exs"
8 | },
9 | "env": {
10 | "SECRET_KEY_BASE": {
11 | "description": "A secret key for verifying the integrity of signed cookies.",
12 | "generator": "secret"
13 | },
14 | "HEROKU_APP_NAME": {
15 | "required": true
16 | },
17 | "POOL_SIZE": 2
18 | },
19 | "buildpacks": [
20 | {
21 | "url": "https://github.com/HashNuke/heroku-buildpack-elixir"
22 | },
23 | {
24 | "url": "https://github.com/gjaldon/heroku-buildpack-phoenix-static.git"
25 | }
26 | ],
27 | "addons": ["heroku-postgresql"]
28 | }
29 |
--------------------------------------------------------------------------------
/.devcontainer/devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Dashy",
3 | "dockerComposeFile": [
4 | "docker-compose.yml"
5 | ],
6 | "service": "phoenix",
7 | "workspaceFolder": "/home/vscode/app",
8 | "extensions": [
9 | "JakeBecker.elixir-ls",
10 | "esbenp.prettier-vscode",
11 | "dbaeumer.vscode-eslint",
12 | "bradlc.vscode-tailwindcss",
13 | "adrianwilczynski.alpine-js-intellisense",
14 | "stylelint.vscode-stylelint",
15 | "ms-azuretools.vscode-docker",
16 | "pantajoe.vscode-elixir-credo",
17 | "stkb.rewrap",
18 | "samuel-pordeus.elixir-test"
19 | ],
20 | "settings": {
21 | "terminal.integrated.shell.linux": "/usr/bin/zsh"
22 | },
23 | "forwardPorts": [
24 | 4000
25 | ],
26 | "postCreateCommand": "bash .devcontainer/setup-dev.sh"
27 | }
--------------------------------------------------------------------------------
/lib/dashy/workflows/workflow.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Workflows.Workflow do
2 | use Dashy.Schema
3 | import Ecto.Changeset
4 |
5 | alias Dashy.Repositories.Repository
6 |
7 | schema "workflows" do
8 | field :external_id, :integer
9 | field :name, :string
10 | field :node_id, :string
11 | field :path, :string
12 | field :state, :string
13 |
14 | belongs_to :repository, Repository
15 |
16 | timestamps(inserted_at: :created_at)
17 | end
18 |
19 | @doc false
20 | def changeset(workflow, attrs) do
21 | workflow
22 | |> cast(attrs, [:external_id, :name, :path, :node_id, :state, :repository_id])
23 | |> validate_required([:external_id, :name, :node_id, :path, :state, :repository_id])
24 | |> unique_constraint(:external_id)
25 | end
26 | end
27 |
--------------------------------------------------------------------------------
/lib/dashy_web/gettext.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Gettext do
2 | @moduledoc """
3 | A module providing Internationalization with a gettext-based API.
4 |
5 | By using [Gettext](https://hexdocs.pm/gettext),
6 | your module gains a set of macros for translations, for example:
7 |
8 | import DashyWeb.Gettext
9 |
10 | # Simple translation
11 | gettext("Here is the string to translate")
12 |
13 | # Plural translation
14 | ngettext("Here is the string to translate",
15 | "Here are the strings to translate",
16 | 3)
17 |
18 | # Domain-based translation
19 | dgettext("errors", "Here is the error message to translate")
20 |
21 | See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
22 | """
23 | use Gettext, otp_app: :dashy
24 | end
25 |
--------------------------------------------------------------------------------
/priv/repo/migrations/20210514152214_create_workflow_run_jobs.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repo.Migrations.CreateWorkflowRunJobs do
2 | use Ecto.Migration
3 |
4 | def change do
5 | create table("workflow_run_jobs") do
6 | add :external_id, :bigint, null: false
7 | add :name, :string, null: false
8 | add :node_id, :string, null: false
9 | add :conclusion, :string, null: false
10 | add :status, :string, null: false
11 | add :started_at, :utc_datetime, null: false
12 | add :completed_at, :utc_datetime, null: true
13 |
14 |       add :workflow_run_id, references(:workflow_runs, column: :external_id, type: :bigint),
15 |         null: false
16 |
17 | add :metadata, :jsonb
18 |
19 | timestamps(inserted_at: :created_at)
20 | end
21 |
22 | create unique_index(:workflow_run_jobs, [:external_id])
23 | end
24 | end
25 |
--------------------------------------------------------------------------------
/lib/dashy_web/components/card/card_title_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.CardTitle do
2 | use Surface.Component
3 |
4 | @doc "The title of the card"
5 | prop title, :string, required: true
6 |
7 | @doc "The subtitle of the card"
8 | prop subtitle, :string, required: false
9 |
10 | def render(assigns) do
11 | ~H"""
12 |
13 |
14 |
15 |
16 | {{ @title }}
17 |
18 |
19 | {{ @subtitle }}
20 |
21 |
22 |
23 |
24 | """
25 | end
26 | end
27 |
--------------------------------------------------------------------------------
/.devcontainer/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM elixir:1.11.2-slim
2 |
3 | ARG USERNAME=vscode
4 |
5 | RUN apt-get update && \
6 | apt-get install -y postgresql-client && \
7 | apt-get install -y inotify-tools && \
8 | apt-get install -y vim && \
9 | apt-get install -y git && \
10 | apt-get install -y curl && \
11 | apt-get install -y wget && \
12 | apt-get install -y gnupg2 && \
13 | apt-get install -y rubygems && \
14 | apt-get install -y rename && \
15 | apt-get install -y zsh
16 |
17 | RUN curl -sL https://deb.nodesource.com/setup_15.x | bash -
18 |
19 | RUN apt-get update && \
20 | apt-get install -y nodejs
21 |
22 | RUN gem install htmlbeautifier
23 |
24 | COPY setup-container.sh .
25 | RUN bash ./setup-container.sh
26 |
27 | ENV MIX_HOME=/root/.mix
28 | ENV HEX_HOME=/root/.hex
29 |
30 | RUN mix local.hex --force && \
31 | mix local.rebar --force
32 |
--------------------------------------------------------------------------------
/test/support/factories/workflow_run_job_factory.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRunJobFactory do
2 | @moduledoc """
3 | This module contains the workflow run job factories.
4 | """
5 |
6 | use ExMachina.Ecto, repo: Dashy.Repo
7 |
8 | defmacro __using__(_opts) do
9 | quote do
10 | def workflow_run_job_factory do
11 | %Dashy.WorkflowRunJobs.WorkflowRunJob{
12 | workflow_run: build(:workflow_run),
13 | name: "My workflow run job",
14 | conclusion: "completed",
15 | status: "completed",
16 | node_id: sequence(:node_id, &"node-id-#{&1}"),
17 | external_id: sequence(:workflow_run_job_external_id, fn id -> id end),
18 | started_at: DateTime.utc_now(),
19 | completed_at: DateTime.utc_now(),
20 | head_sha: "i8twygrheiugnyeourytenvor8oyy",
21 | metadata: %{"foo" => 1}
22 | }
23 | end
24 | end
25 | end
26 | end
27 |
--------------------------------------------------------------------------------
/lib/dashy/release.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Release do
2 | @app :dashy
3 |
4 | def db_create do
5 | load_app()
6 |
7 | for repo <- repos() do
8 | :ok =
9 | case repo.__adapter__.storage_up(repo.config) do
10 | :ok -> :ok
11 | {:error, :already_up} -> :ok
12 | {:error, term} -> {:error, term}
13 | end
14 | end
15 | end
16 |
17 | def db_migrate do
18 | load_app()
19 |
20 | for repo <- repos() do
21 | {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :up, all: true))
22 | end
23 | end
24 |
25 | def db_rollback(repo, version) do
26 | load_app()
27 | {:ok, _, _} = Ecto.Migrator.with_repo(repo, &Ecto.Migrator.run(&1, :down, to: version))
28 | end
29 |
30 | defp repos do
31 | Application.fetch_env!(@app, :ecto_repos)
32 | end
33 |
34 | defp load_app do
35 | Application.load(@app)
36 | Application.ensure_all_started(:ssl)
37 | end
38 | end
39 |
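40 | # Meant to be run from a built release, typically something like:
41 | #
42 | #   bin/dashy eval "Dashy.Release.db_create()"
43 | #   bin/dashy eval "Dashy.Release.db_migrate()"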
--------------------------------------------------------------------------------
/lib/dashy_web/components/modal/modal_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.Modal do
2 | use Surface.Component
3 |
4 | @doc "The content of the Modal"
5 | slot default, required: true
6 |
7 | def render(assigns) do
8 | ~H"""
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 | """
20 | end
21 | end
22 |
--------------------------------------------------------------------------------
/assets/tailwind.config.js:
--------------------------------------------------------------------------------
1 | const defaultTheme = require("tailwindcss/defaultTheme")
2 |
3 | module.exports = {
4 | plugins: [require("@tailwindcss/forms")],
5 | purge: {
6 | enabled: false,
7 | content: [
8 | "../**/*.leex",
9 | "../**/*.eex",
10 | "../**/*.ex",
11 | "../**/*.exs",
12 | "./**/*.js",
13 | ],
14 | },
15 | theme: {
16 | extend: {
17 | maxWidth: {
18 | card: "628px",
19 | },
20 | fontFamily: {
21 | sans: ["Inter var", ...defaultTheme.fontFamily.sans],
22 | },
23 | inset: {
24 | "2px": "2px",
25 | },
26 | scale: {
27 | flip: "-1",
28 | },
29 | gridTemplateColumns: {
30 | "sortable-list":
31 | "16px minmax(70px, 1fr) minmax(140px, 1fr) minmax(40px, 1fr) 50px",
32 | },
33 | },
34 | },
35 |
36 | variants: {
37 | extend: {
38 | backgroundColor: ["checked"],
39 | borderColor: ["checked"],
40 | },
41 | },
42 | }
43 |
--------------------------------------------------------------------------------
/assets/js/charts/parts/parts.js:
--------------------------------------------------------------------------------
1 | import Chart from "chart.js/auto"
2 | import "chartjs-adapter-luxon"
3 |
4 | import { config, buildDatasets } from "./config"
5 |
6 | const PartsHooks = {
7 | mounted() {
8 | var chart = new Chart(this.el, config)
9 |
10 | this.handleEvent("load-parts", ({ data }) => {
11 | chart.data = {
12 | datasets: buildDatasets(data),
13 | }
14 | chart.update()
15 |
16 | const $partsList = document.getElementById(this.el.dataset.listId)
17 |
18 | Object.values(document.getElementsByClassName("part_name")).forEach(
19 | (part) => {
20 | part.addEventListener("mouseover", (e) => {
21 | window.partNameFocus = e.currentTarget.dataset.slug
22 | chart.update()
23 | })
24 | }
25 | )
26 | $partsList.addEventListener("mouseout", (_) => {
27 | window.partNameFocus = null
28 | chart.update()
29 | })
30 | })
31 | },
32 | }
33 |
34 | export default PartsHooks
35 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # The directory Mix will write compiled artifacts to.
2 | /_build/
3 |
4 | # If you run "mix test --cover", coverage assets end up here.
5 | /cover/
6 |
7 | # The directory Mix downloads your dependencies sources to.
8 | /deps/
9 |
10 | # Where 3rd-party dependencies like ExDoc output generated docs.
11 | /doc/
12 |
13 | # Ignore .fetch files in case you like to edit your project deps locally.
14 | /.fetch
15 |
16 | # If the VM crashes, it generates a dump, let's ignore it too.
17 | erl_crash.dump
18 |
19 | # Also ignore archive artifacts (built via "mix archive.build").
20 | *.ez
21 |
22 | # Ignore package tarball (built via "mix hex.build").
23 | dashy-*.tar
24 |
25 | # If NPM crashes, it generates a log, let's ignore it too.
26 | npm-debug.log
27 |
28 | # The directory NPM downloads your dependencies sources to.
29 | /assets/node_modules/
30 |
31 | # Since we are building assets from assets/,
32 | # we ignore priv/static. You may want to comment
33 | # this depending on your deployment strategy.
34 | /priv/static/
35 |
36 | config/*.local.exs
--------------------------------------------------------------------------------
/lib/dashy/charts/workflow_runs_fake.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.WorkflowRunsFake do
2 | alias Dashy.Charts.Run
3 |
4 | def runs(_repo, opts \\ []) do
5 | count = Keyword.get(opts, :count, 50)
6 | fake_runs(DateTime.now!("Etc/UTC"), [], count)
7 | end
8 |
9 | defp fake_runs(_last_run_date, data, 0), do: data
10 |
11 | defp fake_runs(last_run_date, data, n) do
12 | time = DateTime.add(last_run_date, -:rand.uniform(1_000_000), :second)
13 | seconds = :rand.normal(180, 30) |> Float.round()
14 |
15 | run = %Run{
16 | time: time,
17 | seconds: seconds,
18 | minutes: seconds / 60,
19 | link: "https://github.com/decidim/decidim/commit/d3b88afe90e5643848e94ef13ee1e850bbc01e2d",
20 | status: set_status()
21 | }
22 |
23 | fake_runs(time, [run | data], n - 1)
24 | end
25 |
26 | defp set_status() do
27 | case :rand.uniform(100) do
28 | x when x < 20.0 -> "cancelled"
29 | x when x < 40.0 -> "error"
30 | x when x < 60.0 -> "pending"
31 | _ -> "success"
32 | end
33 | end
34 | end
35 |
--------------------------------------------------------------------------------
/lib/dashy/fetchers/workflows_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Fetchers.WorkflowsFetcher do
2 | @behaviour GitHubWorkflowsFetcher
3 |
4 | @expected_fields ~w(
5 | id node_id name path state created_at updated_at
6 | )
7 |
8 | @impl GitHubWorkflowsFetcher
9 | def get(repo) do
10 | case GitHubClient.get(url(repo)) do
11 | {:ok, response} -> %{body: process(response.body)}
12 | {:error, _} = err -> err
13 | end
14 | end
15 |
16 | defp process(body) do
17 | body
18 | |> Jason.decode!()
19 | |> Map.get("workflows")
20 | |> parse()
21 | end
22 |
23 | defp parse(nil), do: []
24 |
25 | defp parse(workflows) do
26 | workflows
27 | |> Enum.map(fn workflow ->
28 | workflow
29 | |> Map.take(@expected_fields)
30 | |> Map.new(fn {k, v} -> {String.to_atom(rename_key(k)), v} end)
31 | end)
32 | end
33 |
34 | defp rename_key("id"), do: "external_id"
35 | defp rename_key(key), do: key
36 |
37 | defp url(repo) do
38 | "https://api.github.com/repos/#{repo}/actions/workflows?per_page=100"
39 | end
40 | end
41 |
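42 | # Shape of a successful result (all values are illustrative):
43 | #
44 | #   %{body: [%{external_id: 42, node_id: "node-42", name: "CI",
45 | #              path: ".github/workflows/ci.yml", state: "active",
46 | #              created_at: "2021-05-13T07:15:47.000Z", updated_at: "2021-05-13T07:15:47.000Z"}]}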
--------------------------------------------------------------------------------
/assets/stylelint.config.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | extends: [
3 | "stylelint-config-standard",
4 | "stylelint-config-rational-order",
5 | "stylelint-prettier/recommended",
6 | ],
7 | plugins: ["stylelint-prettier"],
8 | rules: {
9 | "prettier/prettier": true,
10 | "at-rule-empty-line-before": [
11 | "always",
12 | {
13 | except: [
14 | "blockless-after-blockless",
15 | "blockless-after-same-name-blockless",
16 | "first-nested",
17 | ],
18 | ignore: ["after-comment", "inside-block"],
19 | ignoreAtRules: ["apply", "screen", "font-face", "nest"],
20 | },
21 | ],
22 | "at-rule-no-unknown": [
23 | true,
24 | {
25 | ignoreAtRules: [
26 | "tailwind",
27 | "variants",
28 | "responsive",
29 | "apply",
30 | "screen",
31 | ],
32 | },
33 | ],
34 | "property-no-unknown": [
35 | true,
36 | {
37 | ignoreProperties: ["font-path"],
38 | },
39 | ],
40 | "selector-nested-pattern": "^&",
41 | },
42 | }
43 |
--------------------------------------------------------------------------------
/test/dashy/workflows_test.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowsTest do
2 | use Dashy.DataCase
3 |
4 | alias Dashy.Workflows
5 |
6 | describe "get_by_external_id/1" do
7 | test "finds the workflow" do
8 | workflow = insert(:workflow)
9 |
10 | assert workflow ==
11 | Workflows.get_by_external_id(workflow.external_id)
12 | |> Dashy.Repo.preload(:repository)
13 | end
14 | end
15 |
16 | describe "create_or_update/1" do
17 | test "creates a workflow" do
18 | repo = insert(:repository)
19 | attrs = params_for(:workflow) |> Map.merge(%{repository_id: repo.id})
20 |
21 | assert {:ok, _workflow} = Workflows.create_or_update(attrs)
22 | end
23 |
24 | test "updates the record when external_id is existing in the DB" do
25 | workflow = insert(:workflow, state: "active")
26 |
27 | attrs =
28 | params_for(:workflow)
29 | |> Map.merge(%{external_id: workflow.external_id, state: "cancelled"})
30 |
31 | assert {:ok, workflow} = Workflows.create_or_update(attrs)
32 | assert workflow.state == "cancelled"
33 | end
34 | end
35 | end
36 |
--------------------------------------------------------------------------------
/priv/repo/dev_seeds.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.DevSeeder do
2 | @moduledoc """
3 | Module to create fake data to use in development environment
4 | """
5 |
6 | alias Dashy.Repositories
7 | alias Dashy.Repositories.Repository
8 | alias Dashy.Workflows.Workflow
9 | alias Dashy.WorkflowRuns.WorkflowRun
10 | alias Dashy.WorkflowRunJobs.WorkflowRunJob
11 |
12 | alias Dashy.Repo
13 |
14 |
15 | def create_repo(user, name, branch) do
16 | {:ok, repo} = Repositories.create_repository(%{user: user, name: name, branch: branch})
17 | Dashy.Fetcher.update_workflows(repo)
18 | Dashy.Fetcher.update_workflow_runs(repo)
19 | Dashy.Fetcher.update_all_workflow_run_jobs(repo)
20 | end
21 |
22 | def delete_data do
23 | Repo.delete_all(WorkflowRunJob)
24 | Repo.delete_all(WorkflowRun)
25 | Repo.delete_all(Workflow)
26 | Repo.delete_all(Repository)
27 | end
28 |
29 | end
30 |
31 | Dashy.DevSeeder.delete_data()
32 | Dashy.DevSeeder.create_repo("decidim", "decidim-bulletin-board", "develop")
33 | Dashy.DevSeeder.create_repo("ether", "etherpad-lite", "develop")
34 | Dashy.DevSeeder.create_repo("discourse", "discourse", "master")
35 |
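These seeds are not pure fixtures: `create_repo/3` inserts a repository row and then pulls live data through `Dashy.Fetcher`, so running them needs network access and, realistically, a `GITHUB_TOKEN` (unauthenticated GitHub API calls are heavily rate-limited; config/config.exs already reads that variable). They can be loaded with `mix run priv/repo/dev_seeds.exs`.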
--------------------------------------------------------------------------------
/test/dashy/workflow_runs_test.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRunsTest do
2 | use Dashy.DataCase
3 |
4 | alias Dashy.WorkflowRuns
5 |
6 | describe "get_by_external_id/1" do
7 | test "finds the workflow_run" do
8 | workflow_run = insert(:workflow_run)
9 |
10 | assert workflow_run.id == WorkflowRuns.get_by_external_id(workflow_run.external_id).id
11 | end
12 | end
13 |
14 | describe "create_or_update/1" do
15 | test "creates a workflow_run" do
16 | workflow = insert(:workflow)
17 | attrs = params_for(:workflow_run) |> Map.merge(%{workflow_id: workflow.external_id})
18 |
19 | assert {:ok, _workflow_run} = WorkflowRuns.create_or_update(attrs)
20 | end
21 |
22 | test "updates the record when external_id is existing in the DB" do
23 | workflow_run = insert(:workflow_run, status: "active")
24 |
25 | attrs =
26 | params_for(:workflow_run)
27 | |> Map.merge(%{external_id: workflow_run.external_id, status: "cancelled"})
28 |
29 | assert {:ok, workflow_run} = WorkflowRuns.create_or_update(attrs)
30 | assert workflow_run.status == "cancelled"
31 | end
32 | end
33 | end
34 |
--------------------------------------------------------------------------------
/lib/dashy/workflow_runs.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRuns do
2 | alias Dashy.Repo
3 | alias Dashy.WorkflowRuns.WorkflowRun
4 | alias Dashy.WorkflowRunJobs.WorkflowRunJob
5 |
6 | import Ecto.Query
7 |
8 | def get_by_external_id(id), do: Repo.get_by(WorkflowRun, external_id: id)
9 |
10 | def create_or_update(attrs) do
11 | case get_by_external_id(attrs.external_id) do
12 | nil -> %WorkflowRun{}
13 | workflow_run -> workflow_run
14 | end
15 | |> WorkflowRun.changeset(attrs)
16 | |> Repo.insert_or_update()
17 | end
18 |
19 | def update_from_jobs(external_id) do
20 | from(
21 | j in WorkflowRunJob,
22 | select: %{
23 | started_at: min(j.started_at),
24 | completed_at: max(j.completed_at)
25 | },
26 | where: j.workflow_run_id == ^external_id,
27 | group_by: j.workflow_run_id
28 | )
29 | |> Repo.one()
30 | |> update_if_exists(external_id)
31 | end
32 |
33 | defp update_if_exists(nil, _), do: nil
34 |
35 | defp update_if_exists(attrs, external_id) do
36 | get_by_external_id(external_id)
37 | |> WorkflowRun.changeset(attrs)
38 | |> Repo.update()
39 | end
40 | end
41 |
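A minimal sketch of the upsert flow above (field values are illustrative; `attrs` must be a map with an `:external_id` key, since `create_or_update/1` accesses it with dot syntax):

    attrs = %{
      external_id: 123,
      name: "CI",
      node_id: "node-123",
      status: "completed",
      conclusion: "success",
      workflow_id: 42,
      metadata: %{},
      head_sha: "abc123"
    }

    {:ok, run} = Dashy.WorkflowRuns.create_or_update(attrs)

    # Derive started_at/completed_at from the earliest/latest job timestamps:
    Dashy.WorkflowRuns.update_from_jobs(run.external_id)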
--------------------------------------------------------------------------------
/lib/dashy/application.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Application do
2 | # See https://hexdocs.pm/elixir/Application.html
3 | # for more information on OTP Applications
4 | @moduledoc false
5 |
6 | use Application
7 |
8 | def start(_type, _args) do
9 | children = [
10 | # Start the Ecto repository
11 | Dashy.Repo,
12 | # Start the Telemetry supervisor
13 | DashyWeb.Telemetry,
14 | # Start the PubSub system
15 | {Phoenix.PubSub, name: Dashy.PubSub},
16 | # Start the Endpoint (http/https)
17 | DashyWeb.Endpoint,
18 | # Start a worker by calling: Dashy.Worker.start_link(arg)
19 | # {Dashy.Worker, arg}
20 | {DynamicSupervisor, strategy: :one_for_one, name: Dashy.FetcherSupervisor}
21 | ]
22 |
23 | # See https://hexdocs.pm/elixir/Supervisor.html
24 | # for other strategies and supported options
25 | opts = [strategy: :one_for_one, name: Dashy.Supervisor]
26 | Supervisor.start_link(children, opts)
27 | end
28 |
29 | # Tell Phoenix to update the endpoint configuration
30 | # whenever the application is updated.
31 | def config_change(changed, _new, removed) do
32 | DashyWeb.Endpoint.config_change(changed, removed)
33 | :ok
34 | end
35 | end
36 |
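Because `Dashy.FetcherSupervisor` is a `DynamicSupervisor`, fetcher processes are started on demand rather than listed as static children. A hedged sketch (the child here is a stand-in `Agent`, not the real `Dashy.Fetchers.GenServerFetcher` API):

    # Agent ships its own child_spec/1, so this starts a throwaway
    # worker under the dynamic supervisor with no extra code:
    {:ok, _pid} =
      DynamicSupervisor.start_child(Dashy.FetcherSupervisor, {Agent, fn -> %{} end})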
--------------------------------------------------------------------------------
/test/dashy/workflow_run_jobs_test.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRunJobsTest do
2 | use Dashy.DataCase
3 |
4 | alias Dashy.WorkflowRunJobs
5 |
6 | describe "get_by_external_id/1" do
7 | test "finds the workflow_run_job" do
8 | workflow_run_job = insert(:workflow_run_job)
9 |
10 | assert workflow_run_job.id ==
11 | WorkflowRunJobs.get_by_external_id(workflow_run_job.external_id).id
12 | end
13 | end
14 |
15 | describe "create_or_update/1" do
16 | test "creates a workflow_run_job" do
17 | run = insert(:workflow_run)
18 | attrs = params_for(:workflow_run_job) |> Map.merge(%{workflow_run_id: run.external_id})
19 |
20 | assert {:ok, _workflow_run_job} = WorkflowRunJobs.create_or_update(attrs)
21 | end
22 |
23 | test "updates the record when external_id is existing in the DB" do
24 | workflow_run_job = insert(:workflow_run_job, status: "active")
25 |
26 | attrs =
27 | params_for(:workflow_run_job)
28 | |> Map.merge(%{external_id: workflow_run_job.external_id, status: "cancelled"})
29 |
30 | assert {:ok, workflow_run_job} = WorkflowRunJobs.create_or_update(attrs)
31 | assert workflow_run_job.status == "cancelled"
32 | end
33 | end
34 | end
35 |
--------------------------------------------------------------------------------
/lib/dashy_web/channels/user_socket.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.UserSocket do
2 | use Phoenix.Socket
3 |
4 | ## Channels
5 | # channel "room:*", DashyWeb.RoomChannel
6 |
7 | # Socket params are passed from the client and can
8 | # be used to verify and authenticate a user. After
9 | # verification, you can put default assigns into
10 | # the socket that will be set for all channels, i.e.
11 | #
12 | # {:ok, assign(socket, :user_id, verified_user_id)}
13 | #
14 | # To deny connection, return `:error`.
15 | #
16 | # See `Phoenix.Token` documentation for examples in
17 | # performing token verification on connect.
18 | @impl true
19 | def connect(_params, socket, _connect_info) do
20 | {:ok, socket}
21 | end
22 |
23 | # Socket ids are topics that allow you to identify all sockets for a given user:
24 | #
25 | # def id(socket), do: "user_socket:#{socket.assigns.user_id}"
26 | #
27 | # Would allow you to broadcast a "disconnect" event and terminate
28 | # all active sockets and channels for a given user:
29 | #
30 | # DashyWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
31 | #
32 | # Returning `nil` makes this socket anonymous.
33 | @impl true
34 | def id(_socket), do: nil
35 | end
36 |
--------------------------------------------------------------------------------
/test/support/channel_case.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.ChannelCase do
2 | @moduledoc """
3 | This module defines the test case to be used by
4 | channel tests.
5 |
6 | Such tests rely on `Phoenix.ChannelTest` and also
7 | import other functionality to make it easier
8 | to build common data structures and query the data layer.
9 |
10 | Finally, if the test case interacts with the database,
11 | we enable the SQL sandbox, so changes done to the database
12 | are reverted at the end of every test. If you are using
13 | PostgreSQL, you can even run database tests asynchronously
14 | by setting `use DashyWeb.ChannelCase, async: true`, although
15 | this option is not recommended for other databases.
16 | """
17 |
18 | use ExUnit.CaseTemplate
19 |
20 | using do
21 | quote do
22 | # Import conveniences for testing with channels
23 | import Phoenix.ChannelTest
24 | import DashyWeb.ChannelCase
25 |
26 | # The default endpoint for testing
27 | @endpoint DashyWeb.Endpoint
28 | end
29 | end
30 |
31 | setup tags do
32 | :ok = Ecto.Adapters.SQL.Sandbox.checkout(Dashy.Repo)
33 |
34 | unless tags[:async] do
35 | Ecto.Adapters.SQL.Sandbox.mode(Dashy.Repo, {:shared, self()})
36 | end
37 |
38 | :ok
39 | end
40 | end
41 |
--------------------------------------------------------------------------------
/lib/dashy/workflow_run_jobs/workflow_run_job.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRunJobs.WorkflowRunJob do
2 | use Dashy.Schema
3 | import Ecto.Changeset
4 |
5 | schema "workflow_run_jobs" do
6 | field :external_id, :integer
7 | field :name, :string
8 | field :node_id, :string
9 | field :status, :string
10 | field :conclusion, :string
11 | field :started_at, :utc_datetime
12 | field :completed_at, :utc_datetime
13 | field :head_sha, :string
14 | field :metadata, :map
15 |
16 | belongs_to :workflow_run, Dashy.WorkflowRuns.WorkflowRun, references: :external_id
17 | timestamps(inserted_at: :created_at)
18 | end
19 |
20 | @doc false
21 | def changeset(workflow_run_job, attrs) do
22 | workflow_run_job
23 | |> cast(attrs, [
24 | :external_id,
25 | :name,
26 | :node_id,
27 | :status,
28 | :conclusion,
29 | :started_at,
30 | :completed_at,
31 | :workflow_run_id,
32 | :head_sha,
33 | :metadata
34 | ])
35 | |> validate_required([
36 | :external_id,
37 | :name,
38 | :node_id,
39 | :status,
40 | :conclusion,
41 | :started_at,
42 | :workflow_run_id,
43 | :head_sha,
44 | :metadata
45 | ])
46 | |> unique_constraint(:external_id)
47 | end
48 | end
49 |
--------------------------------------------------------------------------------
/config/config.exs:
--------------------------------------------------------------------------------
1 | # This file is responsible for configuring your application
2 | # and its dependencies with the aid of the Mix.Config module.
3 | #
4 | # This configuration file is loaded before any dependency and
5 | # is restricted to this project.
6 |
7 | # General application configuration
8 | use Mix.Config
9 |
10 | config :dashy,
11 | ecto_repos: [Dashy.Repo]
12 |
13 | # Configures the endpoint
14 | config :dashy, DashyWeb.Endpoint,
15 | url: [host: "localhost"],
16 | secret_key_base: "aolxoORjdoDyW02zzqvJvVDSe3zaT/Gimz5o5CHsao8oz5JYVLtUl3xEpJ2wbqBP",
17 | render_errors: [view: DashyWeb.ErrorView, accepts: ~w(html json), layout: false],
18 | pubsub_server: Dashy.PubSub,
19 | live_view: [signing_salt: "y4Dah9+Z"]
20 |
21 | # Configures Elixir's Logger
22 | config :logger, :console,
23 | format: "$time $metadata[$level] $message\n",
24 | metadata: [:request_id]
25 |
26 | # Use Jason for JSON parsing in Phoenix
27 | config :phoenix, :json_library, Jason
28 |
29 | config :dashy, Dashy.Repo, migration_timestamps: [type: :utc_datetime_usec]
30 |
31 | config :dashy, Dashy.Fetcher, token: System.get_env("GITHUB_TOKEN")
32 |
33 | # Import environment specific config. This must remain at the bottom
34 | # of this file so it overrides the configuration defined above.
35 | import_config "#{Mix.env()}.exs"
36 |
--------------------------------------------------------------------------------
/lib/dashy/workflow_runs/workflow_run.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRuns.WorkflowRun do
2 | use Dashy.Schema
3 | import Ecto.Changeset
4 |
5 | schema "workflow_runs" do
6 | field :external_id, :integer
7 | field :name, :string
8 | field :node_id, :string
9 | field :status, :string
10 | field :conclusion, :string
11 | field :metadata, :map
12 | field :head_sha, :string
13 | field :started_at, :utc_datetime
14 | field :completed_at, :utc_datetime
15 |
16 | belongs_to :workflow, Dashy.Workflows.Workflow, references: :external_id
17 | timestamps(inserted_at: :created_at)
18 | end
19 |
20 | @doc false
21 | def changeset(workflow_run, attrs) do
22 | workflow_run
23 | |> cast(attrs, [
24 | :created_at,
25 | :updated_at,
26 | :external_id,
27 | :name,
28 | :node_id,
29 | :status,
30 | :conclusion,
31 | :workflow_id,
32 | :metadata,
33 | :head_sha,
34 | :started_at,
35 | :completed_at
36 | ])
37 | |> validate_required([
38 | :external_id,
39 | :name,
40 | :node_id,
41 | :status,
42 | :conclusion,
43 | :workflow_id,
44 | :metadata,
45 | :head_sha
46 | ])
47 | |> unique_constraint(:external_id)
48 | |> foreign_key_constraint(:workflow_id)
49 | end
50 | end
51 |
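Since the association uses `references: :external_id`, the `workflow_id` column holds the workflow's GitHub id rather than the local primary key. A minimal changeset sketch (values illustrative; the `foreign_key_constraint` is only checked on insert):

    alias Dashy.WorkflowRuns.WorkflowRun

    changeset =
      WorkflowRun.changeset(%WorkflowRun{}, %{
        external_id: 1,
        name: "CI",
        node_id: "n1",
        status: "queued",
        conclusion: "neutral",
        workflow_id: 99,
        metadata: %{},
        head_sha: "deadbeef"
      })

    changeset.valid?
    # => true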
--------------------------------------------------------------------------------
/test/support/factories/workflow_run_factory.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.WorkflowRunFactory do
2 | @moduledoc """
3 | This module contains the workflow run factories.
4 | """
5 |
6 | use ExMachina.Ecto, repo: Dashy.Repo
7 |
8 | defmacro __using__(_opts) do
9 | quote do
10 | def workflow_run_factory do
11 | %Dashy.WorkflowRuns.WorkflowRun{
12 | workflow: build(:workflow),
13 | name: "My workflow run",
14 | conclusion: "completed",
15 | status: "completed",
16 | node_id: sequence(:node_id, &"node-id-#{&1}"),
17 | external_id: sequence(:workflow_run_external_id, fn id -> id end),
18 | head_sha: "2345678sdf5678dfs67543dsfgdrs",
19 | metadata: %{"foo" => 1}
20 | }
21 | end
22 |
23 | def workflow_run_with_workflow_id_factory do
24 | %Dashy.WorkflowRuns.WorkflowRun{
25 | workflow_id: insert(:workflow).external_id,
26 | name: "My workflow run",
27 | conclusion: "completed",
28 | status: "completed",
29 | node_id: sequence(:node_id, &"node-id-#{&1}"),
30 | external_id: sequence(:workflow_run_external_id, fn id -> id end),
31 | head_sha: "2345678sdf5678dfs67543dsfgdrs",
32 | metadata: %{"foo" => 1}
33 | }
34 | end
35 | end
36 | end
37 | end
38 |
--------------------------------------------------------------------------------
/.github/workflows/docs.yml:
--------------------------------------------------------------------------------
1 | name: Documentation
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | test:
10 | name: Docs
11 | runs-on: ubuntu-20.04
12 | steps:
13 | - uses: actions/checkout@v2.3.4
14 | - name: Copy config templates
15 | working-directory: "config"
16 | run: |
17 | cp dev.local.exs.example dev.local.exs
18 | cp test.local.exs.example test.local.exs
19 | - name: Set up Elixir
20 | uses: actions/setup-elixir@v1.5
21 | with:
22 | elixir-version: "1.11.2" # Define the elixir version [required]
23 | otp-version: "23.0" # Define the OTP version [required]
24 | experimental-otp: true
25 | - name: Restore deps cache
26 | uses: actions/cache@v2.1.5
27 | with:
28 | path: |
29 | **/_build
30 | **/deps
31 | key: ${{ runner.os }}-mix-${{ hashFiles('**/mix.lock') }}
32 | restore-keys: ${{ runner.os }}-mix-
33 | - name: Install dependencies
34 | run: mix deps.get
35 | - name: Generate docs
36 | run: mix docs
37 | - name: Deploy 🚀
38 | uses: JamesIves/github-pages-deploy-action@4.1.3
39 | with:
40 | branch: gh-pages # The branch the action should deploy to.
41 | folder: doc # The folder the action should deploy.
42 |
--------------------------------------------------------------------------------
/lib/dashy_web/live/page_live.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.PageLive do
2 | use Surface.LiveView
3 |
4 | alias DashyWeb.Router.Helpers, as: Routes
5 |
6 | alias Dashy.Repositories
7 |
8 | alias DashyWeb.Components.Layout
9 | alias DashyWeb.Components.Card
10 | alias DashyWeb.Components.CardContent
11 | alias DashyWeb.Components.CardTitle
12 |
13 | @impl true
14 | def mount(_params, _session, socket) do
15 | repos = Repositories.list_repositories() |> Enum.take(10)
16 | {:ok, assign(socket, repos: repos)}
17 | end
18 |
19 | @impl true
20 | def render(assigns) do
21 | ~H"""
22 |
39 | """
40 | end
41 | end
42 |
--------------------------------------------------------------------------------
/lib/dashy_web/router.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Router do
2 | use DashyWeb, :router
3 |
4 | pipeline :browser do
5 | plug :accepts, ["html"]
6 | plug :fetch_session
7 | plug :fetch_live_flash
8 | plug :put_root_layout, {DashyWeb.LayoutView, :root}
9 | plug :protect_from_forgery
10 | plug :put_secure_browser_headers
11 | end
12 |
13 | pipeline :api do
14 | plug :accepts, ["json"]
15 | end
16 |
17 | scope "/", DashyWeb do
18 | pipe_through :browser
19 |
20 | live "/", PageLive, :index
21 | live "/ui", UILive, :index
22 | live "/repo/:user/:name", RepoLive, :index
23 | end
24 |
25 | # Other scopes may use custom stacks.
26 | # scope "/api", DashyWeb do
27 | # pipe_through :api
28 | # end
29 |
30 | # Enables LiveDashboard only for development
31 | #
32 | # If you want to use the LiveDashboard in production, you should put
33 | # it behind authentication and allow only admins to access it.
34 | # If your application does not have an admins-only section yet,
35 | # you can use Plug.BasicAuth to set up some basic authentication
36 | # as long as you are also using SSL (which you should anyway).
37 | if Mix.env() in [:dev, :test] do
38 | import Phoenix.LiveDashboard.Router
39 |
40 | scope "/" do
41 | pipe_through :browser
42 | live_dashboard "/dashboard", metrics: DashyWeb.Telemetry
43 | end
44 | end
45 | end
46 |
--------------------------------------------------------------------------------
/config/prod.secret.exs:
--------------------------------------------------------------------------------
1 | # In this file, we load production configuration and secrets
2 | # from environment variables. You can also hardcode secrets,
3 | # although such is generally not recommended and you have to
4 | # remember to add this file to your .gitignore.
5 | use Mix.Config
6 |
7 | database_url =
8 | System.get_env("DATABASE_URL") ||
9 | raise """
10 | environment variable DATABASE_URL is missing.
11 | For example: ecto://USER:PASS@HOST/DATABASE
12 | """
13 |
14 | config :dashy, Dashy.Repo,
15 | ssl: true,
16 | url: database_url,
17 | pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
18 |
19 | secret_key_base =
20 | System.get_env("SECRET_KEY_BASE") ||
21 | raise """
22 | environment variable SECRET_KEY_BASE is missing.
23 | You can generate one by calling: mix phx.gen.secret
24 | """
25 |
26 | config :dashy, DashyWeb.Endpoint,
27 | http: [
28 | port: String.to_integer(System.get_env("PORT") || "4000"),
29 | transport_options: [socket_opts: [:inet6]]
30 | ],
31 | secret_key_base: secret_key_base
32 |
33 | # ## Using releases (Elixir v1.9+)
34 | #
35 | # If you are doing OTP releases, you need to instruct Phoenix
36 | # to start each relevant endpoint:
37 | #
38 | # config :dashy, DashyWeb.Endpoint, server: true
39 | #
40 | # Then you can assemble a release by calling `mix release`.
41 | # See `mix help release` for more information.
42 |
--------------------------------------------------------------------------------
/test/support/conn_case.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.ConnCase do
2 | @moduledoc """
3 | This module defines the test case to be used by
4 | tests that require setting up a connection.
5 |
6 | Such tests rely on `Phoenix.ConnTest` and also
7 | import other functionality to make it easier
8 | to build common data structures and query the data layer.
9 |
10 | Finally, if the test case interacts with the database,
11 | we enable the SQL sandbox, so changes done to the database
12 | are reverted at the end of every test. If you are using
13 | PostgreSQL, you can even run database tests asynchronously
14 | by setting `use DashyWeb.ConnCase, async: true`, although
15 | this option is not recommended for other databases.
16 | """
17 |
18 | use ExUnit.CaseTemplate
19 |
20 | using do
21 | quote do
22 | # Import conveniences for testing with connections
23 | import Plug.Conn
24 | import Phoenix.ConnTest
25 | import DashyWeb.ConnCase
26 |
27 | alias DashyWeb.Router.Helpers, as: Routes
28 |
29 | # The default endpoint for testing
30 | @endpoint DashyWeb.Endpoint
31 | end
32 | end
33 |
34 | setup tags do
35 | :ok = Ecto.Adapters.SQL.Sandbox.checkout(Dashy.Repo)
36 |
37 | unless tags[:async] do
38 | Ecto.Adapters.SQL.Sandbox.mode(Dashy.Repo, {:shared, self()})
39 | end
40 |
41 | {:ok, conn: Phoenix.ConnTest.build_conn()}
42 | end
43 | end
44 |
--------------------------------------------------------------------------------
/config/releases.exs:
--------------------------------------------------------------------------------
1 | # In this file, we load production configuration and secrets
2 | # from environment variables. You can also hardcode secrets,
3 | # although such is generally not recommended and you have to
4 | # remember to add this file to your .gitignore.
5 | import Config
6 |
7 | database_url =
8 | System.get_env("DATABASE_URL") ||
9 | raise """
10 | environment variable DATABASE_URL is missing.
11 | For example: ecto://USER:PASS@HOST/DATABASE
12 | """
13 |
14 | config :dashy, Dashy.Repo,
15 | # ssl: true,
16 | url: database_url,
17 | pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10")
18 |
19 | secret_key_base =
20 | System.get_env("SECRET_KEY_BASE") ||
21 | raise """
22 | environment variable SECRET_KEY_BASE is missing.
23 | You can generate one by calling: mix phx.gen.secret
24 | """
25 |
26 | # ## Using releases (Elixir v1.9+)
27 | #
28 | # If you are doing OTP releases, you need to instruct Phoenix
29 | # to start each relevant endpoint:
30 | #
31 | # config :dashy, DashyWeb.Endpoint, server: true
32 | #
33 | # Then you can assemble a release by calling `mix release`.
34 | # See `mix help release` for more information.
35 | config :dashy, DashyWeb.Endpoint,
36 | http: [
37 | port: String.to_integer(System.get_env("PORT") || "4000"),
38 | transport_options: [socket_opts: [:inet6]]
39 | ],
40 | secret_key_base: secret_key_base,
41 | server: true
42 |
43 | config :dashy, Dashy.Fetcher, token: System.get_env("GITHUB_TOKEN")
44 |
--------------------------------------------------------------------------------
/assets/js/charts/runs/config.js:
--------------------------------------------------------------------------------
1 | import toTime from "../utils.js"
2 |
3 | const COLORS = {
4 | pending: "#FBBF24CC",
5 | success: "#28A745CC",
6 | error: "#FF3333CC",
7 | cancelled: "#999999CC",
8 | }
9 | function colorize(ctx) {
10 | return COLORS[ctx?.raw?.status]
11 | }
12 |
13 | function visitRun(_event, array) {
14 | if (array[0]) {
15 | window.open(array[0].element.$context.raw.link)
16 | }
17 | }
18 |
19 | export function buildLabels(data) {
20 | return {
21 | labels: data.map((run) => run.time),
22 | datasets: [
23 | {
24 | data: data,
25 | borderRadius: 2,
26 | },
27 | ],
28 | }
29 | }
30 |
31 | export const config = {
32 | type: "bar",
33 | data: buildLabels([]),
34 | options: {
35 | animation: false,
36 | scales: {
37 | x: {
38 | title: {
39 | display: true,
40 | text: "Run",
41 | },
42 | },
43 | y: {
44 | title: {
45 | display: true,
46 | text: "Minutes",
47 | },
48 | },
49 | },
50 | plugins: {
51 | legend: false,
52 | tooltip: {
53 | callbacks: {
54 | beforeTitle: (ctx) => ctx[0].raw.status.toUpperCase(),
55 | label: toTime,
56 | },
57 | },
58 | },
59 | elements: {
60 | bar: {
61 | backgroundColor: colorize,
62 | },
63 | },
64 | parsing: {
65 | xAxisKey: "minutes",
66 | yAxisKey: "minutes",
67 | },
68 | onClick: visitRun,
69 | },
70 | }
71 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM elixir:1.11-alpine AS build
2 |
3 | EXPOSE 5000
4 | ARG host=
5 | ARG databaseUrl=
6 | ARG secretKeyBase=
7 | ARG githubToken=
8 |
9 | ENV PORT=5000
10 | ENV MIX_ENV=prod
11 | ENV HOST=$host
12 | ENV DATABASE_URL=$databaseUrl
13 | ENV SECRET_KEY_BASE=$secretKeyBase
14 | ENV GITHUB_TOKEN=$githubToken
15 |
16 | # install build dependencies
17 | RUN apk add --no-cache build-base npm git
18 |
19 | # prepare build dir
20 | WORKDIR /app
21 |
22 | # install hex + rebar
23 | RUN mix local.hex --force && \
24 | mix local.rebar --force
25 |
26 | # build ENV (MIX_ENV=prod is already set above)
27 |
28 |
29 | # install mix dependencies
30 | COPY mix.exs mix.lock ./
31 | COPY config config
32 | RUN mix do deps.get, deps.compile
33 |
34 | # build assets
35 | COPY assets/package.json assets/package-lock.json ./assets/
36 | RUN npm --prefix ./assets ci --progress=false --no-audit --loglevel=error
37 |
38 | COPY priv priv
39 | COPY assets assets
40 | RUN npm run --prefix ./assets deploy
41 | RUN mix phx.digest
42 |
43 | # compile and build release
44 | COPY lib lib
45 | # uncomment COPY if rel/ exists
46 | # COPY rel rel
47 | RUN mix do compile, release
48 |
49 | # prepare release image
50 | FROM alpine:3.9 AS app
51 | RUN apk add --no-cache openssl ncurses-libs
52 |
53 | WORKDIR /app
54 |
55 | RUN chown nobody:nobody /app
56 |
57 | USER nobody:nobody
58 |
59 | COPY --from=build --chown=nobody:nobody /app/_build/prod/rel/dashy ./
60 |
61 | ARG host=
62 | ENV HOST=$host
63 | ENV HOME=/app
64 | ENV PORT=5000
65 |
66 | ENTRYPOINT ["bin/dashy"]
67 |
68 | CMD ["start"]
--------------------------------------------------------------------------------
/assets/webpack.config.js:
--------------------------------------------------------------------------------
1 | const path = require("path")
2 | const glob = require("glob")
3 | const MiniCssExtractPlugin = require("mini-css-extract-plugin")
4 | const TerserPlugin = require("terser-webpack-plugin")
5 | const CssMinimizerPlugin = require("css-minimizer-webpack-plugin")
6 | const CopyWebpackPlugin = require("copy-webpack-plugin")
7 |
8 | module.exports = (env, options) => {
9 | const devMode = options.mode !== "production"
10 |
11 | return {
12 | optimization: {
13 | minimize: !devMode,
14 | minimizer: [
15 | new TerserPlugin({ parallel: true }),
16 | new CssMinimizerPlugin(),
17 | ],
18 | },
19 |
20 | entry: {
21 | app: glob.sync("./vendor/**/*.js").concat(["./js/app.js"]),
22 | },
23 | output: {
24 | filename: "[name].js",
25 | path: path.resolve(__dirname, "../priv/static/js"),
26 | publicPath: "/js/",
27 | },
28 | devtool: devMode ? "eval-cheap-module-source-map" : undefined,
29 | module: {
30 | rules: [
31 | {
32 | test: /\.js$/,
33 | exclude: /node_modules/,
34 | use: {
35 | loader: "babel-loader",
36 | },
37 | },
38 | {
39 | test: /\.[s]?css$/,
40 | use: [MiniCssExtractPlugin.loader, "css-loader", "postcss-loader"],
41 | },
42 | ],
43 | },
44 | plugins: [
45 | new MiniCssExtractPlugin({ filename: "../css/app.css" }),
46 | new CopyWebpackPlugin({ patterns: [{ from: "static/", to: "../" }] }),
47 | ],
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/lib/dashy/fetchers/workflow_run_jobs_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Fetchers.WorkflowRunJobsFetcher do
2 | @doc """
3 | This module fetches the all the workflow run jobs for the given workflow run.
4 | """
5 | @behaviour GitHubWorkflowRunJobsFetcher
6 |
7 | @expected_fields ~w(
8 | id node_id name conclusion status started_at completed_at head_sha
9 | )
10 |
11 | @impl GitHubWorkflowRunJobsFetcher
12 | def get(repo_name, workflow_run_external_id) do
13 | url = build_url(repo_name, workflow_run_external_id)
14 |
15 | case GitHubClient.get(url) do
16 | {:ok, response} -> %{body: process(response.body, workflow_run_external_id)}
17 | {:error, _} = err -> err
18 | end
19 | end
20 |
21 | defp process(body, workflow_run_id) do
22 | body
23 | |> Jason.decode!()
24 | |> Map.get("jobs")
25 | |> parse_body(workflow_run_id)
26 | end
27 |
28 | defp parse_body(nil, _), do: []
29 |
30 | defp parse_body(workflow_run_jobs, workflow_run_id) do
31 | workflow_run_jobs
32 | |> Enum.map(fn workflow_run_job ->
33 | parse_workflow_run_job(workflow_run_job, workflow_run_id)
34 | end)
35 | end
36 |
37 | defp parse_workflow_run_job(workflow_run_job, workflow_run_id) do
38 | workflow_run_job
39 | |> Map.take(@expected_fields)
40 | |> Map.new(fn {k, v} -> {String.to_atom(rename_key(k)), v} end)
41 | |> Map.merge(%{metadata: workflow_run_job, workflow_run_id: workflow_run_id})
42 | end
43 |
44 | defp rename_key("id"), do: "external_id"
45 | defp rename_key(key), do: key
46 |
47 | defp build_url(repo, run_id) do
48 | "https://api.github.com/repos/#{repo}/actions/runs/#{run_id}/jobs"
49 | end
50 | end
51 |
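Call sketch (illustrative: the second argument is the run's GitHub id, and each returned job map keeps the raw payload under `:metadata` plus that run id under `:workflow_run_id`):

    %{body: jobs} =
      Dashy.Fetchers.WorkflowRunJobsFetcher.get("decidim/decidim-bulletin-board", 123_456)

    Enum.map(jobs, &{&1.name, &1.status})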
--------------------------------------------------------------------------------
/.vscode/settings.json:
--------------------------------------------------------------------------------
1 | {
2 | "css.validate": false,
3 | "editor.tabSize": 2,
4 | "editor.formatOnSave": true,
5 | "stylelint.configBasedir": "assets",
6 | "files.associations": {
7 | "*.css": "postcss"
8 | },
9 | "eslint.workingDirectories": ["./assets"],
10 | "eslint.nodePath": "assets",
11 | "[javascript]": {
12 | "editor.defaultFormatter": "dbaeumer.vscode-eslint"
13 | },
14 | "[json]": {
15 | "editor.defaultFormatter": "esbenp.prettier-vscode"
16 | },
17 | "[css]": {
18 | "editor.defaultFormatter": "stylelint.vscode-stylelint"
19 | },
20 | "[postcss]": {
21 | "editor.defaultFormatter": "stylelint.vscode-stylelint"
22 | },
23 | "[elixir]": {
24 | "rewrap.autoWrap.enabled": true
25 | },
26 | "editor.codeActionsOnSave": {
27 | "source.fixAll": true
28 | },
29 | "editor.quickSuggestions": {
30 | "other": true,
31 | "comments": false,
32 | "strings": true
33 | },
34 | "tailwindCSS.includeLanguages": {
35 | "elixir": "javascript"
36 | },
37 | "elixirLS.suggestSpecs": false,
38 | "elixirLS.enableTestLenses": true,
39 | "elixir.credo.configurationFile": ".credo.exs",
40 | "elixir.credo.ignoreWarningMessages": true,
41 | "tailwindCSS.emmetCompletions": true,
42 | "tailwindCSS.experimental.classRegex": ["class:\\s*\"([^\"]*)\""],
43 | "headwind.classRegex": {
44 | "html": "\\bclass\\s*[=:]\\s*[\\\"\\']([_a-zA-Z0-9\\s\\-\\:\\/]+)[\\\"\\']",
45 | "elixir": "\\bclass\\s*:\\s*[\\\"\\']([_a-zA-Z0-9\\s\\-\\:\\/]+)[\\\"\\']",
46 | "css": "\\B@apply\\s+([_a-zA-Z0-9\\s\\-\\:\\/]+);",
47 | "javascript": "(?:\\bclassName\\s*=\\s*[\\\"\\']([_a-zA-Z0-9\\s\\-\\:\\/]+)[\\\"\\'])|(?:\\btw\\s*`([_a-zA-Z0-9\\s\\-\\:\\/]*)`)"
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/assets/js/app.js:
--------------------------------------------------------------------------------
1 | // We need to import the CSS so that webpack will load it.
2 | // The MiniCssExtractPlugin is used to separate it out into
3 | // its own CSS file.
4 | import "../css/app.css"
5 |
6 | // webpack automatically bundles all modules in your
7 | // entry points. Those entry points can be configured
8 | // in "webpack.config.js".
9 | //
10 | // Import deps with the dep name or local files with a relative path, for example:
11 | //
12 | // import {Socket} from "phoenix"
13 | // import socket from "./socket"
14 | //
15 | import "alpinejs"
16 | import "phoenix_html"
17 | import { Socket } from "phoenix"
18 | import NProgress from "nprogress"
19 | import { LiveSocket } from "phoenix_live_view"
20 | import Hooks from "./hooks"
21 |
22 | const csrfToken = document
23 | .querySelector("meta[name='csrf-token']")
24 | .getAttribute("content")
25 | const liveSocket = new LiveSocket("/live", Socket, {
26 | hooks: Hooks,
27 | params: { _csrf_token: csrfToken },
28 | dom: {
29 | onBeforeElUpdated(from, to) {
30 | if (from.__x) {
31 | window.Alpine.clone(from.__x, to)
32 | }
33 | },
34 | },
35 | })
36 |
37 | // Show progress bar on live navigation and form submits
38 | window.addEventListener("phx:page-loading-start", (_info) => NProgress.start())
39 | window.addEventListener("phx:page-loading-stop", (_info) => NProgress.done())
40 |
41 | // connect if there are any LiveViews on the page
42 | liveSocket.connect()
43 |
44 | // expose liveSocket on window for web console debug logs and latency simulation:
45 | // >> liveSocket.enableDebug()
46 | // >> liveSocket.enableLatencySim(1000) // enabled for duration of browser session
47 | // >> liveSocket.disableLatencySim()
48 | window.liveSocket = liveSocket
49 |
--------------------------------------------------------------------------------
/lib/dashy_web/live/colors.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | Colors
7 |
13 |
14 |
15 |
16 | Update
17 |
18 |
19 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/lib/dashy_web/views/error_helpers.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.ErrorHelpers do
2 | @moduledoc """
3 | Conveniences for translating and building error messages.
4 | """
5 |
6 | use Phoenix.HTML
7 |
8 | @doc """
9 | Generates tag for inlined form input errors.
10 | """
11 | def error_tag(form, field) do
12 | Enum.map(Keyword.get_values(form.errors, field), fn error ->
13 | content_tag(:span, translate_error(error),
14 | class: "invalid-feedback",
15 | phx_feedback_for: input_id(form, field)
16 | )
17 | end)
18 | end
19 |
20 | @doc """
21 | Translates an error message using gettext.
22 | """
23 | def translate_error({msg, opts}) do
24 | # When using gettext, we typically pass the strings we want
25 | # to translate as a static argument:
26 | #
27 | # # Translate "is invalid" in the "errors" domain
28 | # dgettext("errors", "is invalid")
29 | #
30 | # # Translate the number of files with plural rules
31 | # dngettext("errors", "1 file", "%{count} files", count)
32 | #
33 | # Because the error messages we show in our forms and APIs
34 | # are defined inside Ecto, we need to translate them dynamically.
35 | # This requires us to call the Gettext module passing our gettext
36 | # backend as first argument.
37 | #
38 | # Note we use the "errors" domain, which means translations
39 | # should be written to the errors.po file. The :count option is
40 | # set by Ecto and indicates we should also apply plural rules.
41 | if count = opts[:count] do
42 | Gettext.dngettext(DashyWeb.Gettext, "errors", msg, msg, count, opts)
43 | else
44 | Gettext.dgettext(DashyWeb.Gettext, "errors", msg, opts)
45 | end
46 | end
47 | end
48 |
--------------------------------------------------------------------------------
/test/support/data_case.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.DataCase do
2 | @moduledoc """
3 | This module defines the setup for tests requiring
4 | access to the application's data layer.
5 |
6 | You may define functions here to be used as helpers in
7 | your tests.
8 |
9 | Finally, if the test case interacts with the database,
10 | we enable the SQL sandbox, so changes done to the database
11 | are reverted at the end of every test. If you are using
12 | PostgreSQL, you can even run database tests asynchronously
13 | by setting `use Dashy.DataCase, async: true`, although
14 | this option is not recommended for other databases.
15 | """
16 |
17 | use ExUnit.CaseTemplate
18 |
19 | using do
20 | quote do
21 | alias Dashy.Repo
22 |
23 | import Ecto
24 | import Ecto.Changeset
25 | import Ecto.Query
26 | import Dashy.DataCase
27 | import Dashy.Factory
28 | end
29 | end
30 |
31 | setup tags do
32 | :ok = Ecto.Adapters.SQL.Sandbox.checkout(Dashy.Repo)
33 |
34 | unless tags[:async] do
35 | Ecto.Adapters.SQL.Sandbox.mode(Dashy.Repo, {:shared, self()})
36 | end
37 |
38 | :ok
39 | end
40 |
41 | @doc """
42 | A helper that transforms changeset errors into a map of messages.
43 |
44 | assert {:error, changeset} = Accounts.create_user(%{password: "short"})
45 | assert "password is too short" in errors_on(changeset).password
46 | assert %{password: ["password is too short"]} = errors_on(changeset)
47 |
48 | """
49 | def errors_on(changeset) do
50 | Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
51 | Regex.replace(~r"%{(\w+)}", message, fn _, key ->
52 | opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
53 | end)
54 | end)
55 | end
56 | end
57 |
--------------------------------------------------------------------------------
/lib/dashy/charts/workflow_runs.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.WorkflowRuns do
2 | alias Dashy.Charts.Run
3 | alias Dashy.Workflows.Workflow
4 | alias Dashy.WorkflowRuns.WorkflowRun
5 |
6 | alias Dashy.Repo
7 | import Ecto.Query
8 |
9 | def runs(repo, _opts \\ []) do
10 | from(
11 | r in WorkflowRun,
12 | join: w in Workflow,
13 | on: r.workflow_id == w.external_id,
14 | select: %{
15 | started_at: min(r.started_at),
16 | completed_at: max(r.completed_at),
17 | conclusion: fragment("array_agg(?)", r.conclusion),
18 | head_sha: r.head_sha
19 | },
20 | where: w.repository_id == ^repo.id,
21 | where: not is_nil(r.started_at),
22 | where: not is_nil(r.completed_at),
23 | group_by: r.head_sha,
24 | order_by: min(r.started_at)
25 | )
26 | |> Repo.all()
27 | |> Enum.map(fn data ->
28 | seconds = DateTime.diff(data.completed_at, data.started_at)
29 |
30 | %Run{
31 | time: data.started_at,
32 | seconds: seconds,
33 | minutes: seconds / 60,
34 | link: link_for(repo, data),
35 | status: status_from(data.conclusion)
36 | }
37 | end)
38 | end
39 |
40 | defp link_for(%{user: user, name: name}, %{head_sha: sha}),
41 | do: "https://github.com/#{user}/#{name}/commit/#{sha}"
42 |
43 | defp status_from(list) do
44 | cond do
45 | Enum.any?(list, fn e -> e == nil || e == "pending" end) ->
46 | "pending"
47 |
48 | Enum.any?(list, fn e -> e == "failure" end) ->
49 | "error"
50 |
51 | Enum.any?(list, fn e -> e == "cancelled" end) ->
52 | "cancelled"
53 |
54 | (list |> Enum.uniq()) -- ["skipped", "success"] == [] ->
55 | "success"
56 |
57 | true ->
58 | "other"
59 | end
60 | end
61 | end
62 |
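The query groups runs by `head_sha`, so each `%Run{}` summarizes every workflow run for one commit, and `status_from/1` collapses the aggregated conclusions. Illustrative outcomes of that (private) function:

    # pending (or nil) beats everything, failure beats cancelled, and a
    # mix of only "skipped"/"success" still counts as a green commit:
    status_from(["success", nil])         # => "pending"
    status_from(["success", "failure"])   # => "error"
    status_from(["skipped", "success"])   # => "success"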
--------------------------------------------------------------------------------
/lib/dashy_web/endpoint.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Endpoint do
2 | use Phoenix.Endpoint, otp_app: :dashy
3 |
4 | # The session will be stored in the cookie and signed,
5 | # this means its contents can be read but not tampered with.
6 | # Set :encryption_salt if you would also like to encrypt it.
7 | @session_options [
8 | store: :cookie,
9 | key: "_dashy_key",
10 | signing_salt: "WKPvdfoQ"
11 | ]
12 |
13 | socket "/socket", DashyWeb.UserSocket,
14 | websocket: true,
15 | longpoll: false
16 |
17 | socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
18 |
19 | # Serve at "/" the static files from "priv/static" directory.
20 | #
21 | # You should set gzip to true if you are running phx.digest
22 | # when deploying your static files in production.
23 | plug Plug.Static,
24 | at: "/",
25 | from: :dashy,
26 | gzip: false,
27 | only: ~w(css fonts images js favicon.ico robots.txt)
28 |
29 | # Code reloading can be explicitly enabled under the
30 | # :code_reloader configuration of your endpoint.
31 | if code_reloading? do
32 | socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
33 | plug Phoenix.LiveReloader
34 | plug Phoenix.CodeReloader
35 | plug Phoenix.Ecto.CheckRepoStatus, otp_app: :dashy
36 | end
37 |
38 | plug Phoenix.LiveDashboard.RequestLogger,
39 | param_key: "request_logger",
40 | cookie_key: "request_logger"
41 |
42 | plug Plug.RequestId
43 | plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
44 |
45 | plug Plug.Parsers,
46 | parsers: [:urlencoded, :multipart, :json],
47 | pass: ["*/*"],
48 | json_decoder: Phoenix.json_library()
49 |
50 | plug Plug.MethodOverride
51 | plug Plug.Head
52 | plug Plug.Session, @session_options
53 | plug DashyWeb.Router
54 | end
55 |
--------------------------------------------------------------------------------
/assets/js/charts/parts/config.js:
--------------------------------------------------------------------------------
1 | import toTime from "../utils.js"
2 |
3 | export const config = {
4 | type: "line",
5 | data: {},
6 | options: {
7 | animation: false,
8 | scales: {
9 | x: {
10 | type: "time",
11 | time: {
12 | tooltipFormat: "DD T",
13 | },
14 | title: {
15 | display: true,
16 | text: "Date",
17 | },
18 | },
19 | y: {
20 | title: {
21 | display: true,
22 | text: "Minutes",
23 | },
24 | },
25 | },
26 | plugins: {
27 | legend: false,
28 | tooltip: {
29 | callbacks: {
30 | beforeTitle: (ctx) => ctx[0].raw.name.toUpperCase(),
31 | label: toTime,
32 | },
33 | },
34 | },
35 | parsing: {
36 | xAxisKey: "time",
37 | yAxisKey: "minutes",
38 | },
39 | },
40 | }
41 |
42 | export function showPartColor(label, color) {
43 | return () => {
44 | if (window.partNameFocus && label != window.partNameFocus) {
45 | return "#CCC"
46 | } else {
47 | return color
48 | }
49 | }
50 | }
51 |
52 | export function showPartBorderWidth(label) {
53 | return () => {
54 | if (label == window.partNameFocus) {
55 | return 2
56 | } else {
57 | return 1
58 | }
59 | }
60 | }
61 |
62 | export function buildDatasets(data) {
63 | return data.map(({ label, data, color }) => {
64 | return {
65 | label: label,
66 | data: data,
67 | cubicInterpolationMode: "monotone",
68 | pointBackgroundColor: showPartColor(label, color),
69 | borderColor: showPartColor(label, color),
70 | borderWidth: showPartBorderWidth(label),
71 | tension: 0.2,
72 | pointStyle: "circle",
73 | pointBorderWidth: 0,
74 | pointRadius: 2,
75 | }
76 | })
77 | }
78 |
--------------------------------------------------------------------------------
/lib/dashy_web/components/button/button_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.Button do
2 | use Surface.Component
3 |
4 | @doc "Extra Css classes"
5 | prop class, :css_class, required: false
6 |
7 | @doc "The type of the button"
8 | prop type, :string, default: "button"
9 |
10 | @doc "The color of the button"
11 | prop color, :string, default: "primary"
12 |
13 | @doc "The class of the button"
14 | prop background, :string, default: "primary"
15 |
16 | @doc "The content for the button"
17 | slot default, required: true
18 |
19 | @doc "Event emitted by the button"
20 | prop click, :event, required: false
21 |
22 | @doc "Value emitted by the button"
23 | prop value, :string, required: false
24 |
25 | def render(assigns) do
26 | ~H"""
27 |
39 |
40 |
41 | """
42 | end
43 | end
44 |
--------------------------------------------------------------------------------
/assets/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "repository": {},
3 | "description": " ",
4 | "license": "MIT",
5 | "scripts": {
6 | "deploy": "NODE_ENV=production webpack --mode production",
7 | "lint": "npm-run-all eslint stylelint",
8 | "eslint": "eslint --ext .js --ignore-path ../.gitignore .",
9 | "stylelint": "stylelint **/*.css",
10 | "watch": "webpack --mode development --watch"
11 | },
12 | "dependencies": {
13 | "@tailwindcss/forms": "^0.3.2",
14 | "alpinejs": "^2.8.2",
15 | "autoprefixer": "^10.2.5",
16 | "chart.js": "^3.2.1",
17 | "chartjs-adapter-luxon": "^1.0.0",
18 | "luxon": "^1.27.0",
19 | "nprogress": "^0.2.0",
20 | "phoenix": "file:../deps/phoenix",
21 | "phoenix_html": "file:../deps/phoenix_html",
22 | "phoenix_live_view": "file:../deps/phoenix_live_view"
23 | },
24 | "devDependencies": {
25 | "@babel/core": "^7.13.14",
26 | "@babel/preset-env": "^7.13.12",
27 | "babel-eslint": "^10.1.0",
28 | "babel-loader": "^8.2.2",
29 | "copy-webpack-plugin": "^8.1.1",
30 | "css-loader": "^5.2.4",
31 | "css-minimizer-webpack-plugin": "^1.3.0",
32 | "eslint": "^7.26.0",
33 | "eslint-config-prettier": "^8.3.0",
34 | "eslint-plugin-prettier": "^3.3.1",
35 | "mini-css-extract-plugin": "^1.6.0",
36 | "npm-run-all": "^4.1.5",
37 | "postcss-import": "^14.0.2",
38 | "postcss-loader": "^5.2.0",
39 | "postcss-nested": "^5.0.5",
40 | "prettier": "^2.3.0",
41 | "stylelint": "^13.13.1",
42 | "stylelint-config-prettier": "^8.0.2",
43 | "stylelint-config-rational-order": "^0.1.2",
44 | "stylelint-config-standard": "^21.0.0",
45 | "stylelint-order": "^4.1.0",
46 | "stylelint-prettier": "^1.2.0",
47 | "stylelint-scss": "^3.19.0",
48 | "tailwindcss": "^2.1.2",
49 | "terser-webpack-plugin": "^5.1.1",
50 | "vfile-location": "^4.0.0",
51 | "webpack": "5.37.0",
52 | "webpack-cli": "^4.7.0"
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/lib/dashy_web/telemetry.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Telemetry do
2 | use Supervisor
3 | import Telemetry.Metrics
4 |
5 | def start_link(arg) do
6 | Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
7 | end
8 |
9 | @impl true
10 | def init(_arg) do
11 | children = [
12 | # Telemetry poller will execute the given period measurements
13 | # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
14 | {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
15 | # Add reporters as children of your supervision tree.
16 | # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
17 | ]
18 |
19 | Supervisor.init(children, strategy: :one_for_one)
20 | end
21 |
22 | def metrics do
23 | [
24 | # Phoenix Metrics
25 | summary("phoenix.endpoint.stop.duration",
26 | unit: {:native, :millisecond}
27 | ),
28 | summary("phoenix.router_dispatch.stop.duration",
29 | tags: [:route],
30 | unit: {:native, :millisecond}
31 | ),
32 |
33 | # Database Metrics
34 | summary("dashy.repo.query.total_time", unit: {:native, :millisecond}),
35 | summary("dashy.repo.query.decode_time", unit: {:native, :millisecond}),
36 | summary("dashy.repo.query.query_time", unit: {:native, :millisecond}),
37 | summary("dashy.repo.query.queue_time", unit: {:native, :millisecond}),
38 | summary("dashy.repo.query.idle_time", unit: {:native, :millisecond}),
39 |
40 | # VM Metrics
41 | summary("vm.memory.total", unit: {:byte, :kilobyte}),
42 | summary("vm.total_run_queue_lengths.total"),
43 | summary("vm.total_run_queue_lengths.cpu"),
44 | summary("vm.total_run_queue_lengths.io")
45 | ]
46 | end
47 |
48 | defp periodic_measurements do
49 | [
50 | # A module, function and arguments to be invoked periodically.
51 | # This function must call :telemetry.execute/3 and a metric must be added above.
52 | # {DashyWeb, :count_users, []}
53 | ]
54 | end
55 | end
56 |
--------------------------------------------------------------------------------
/lib/dashy/fetchers/workflow_runs_fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Fetchers.WorkflowRunsFetcher do
2 | @doc """
3 | This module fetches the latest workflow runs for the given branch and the
4 | given page.
5 | """
6 | @behaviour GitHubWorkflowRunsFetcher
7 |
8 | @expected_fields ~w(
9 | id node_id name conclusion status created_at updated_at workflow_id head_sha
10 | )
11 |
12 | @impl GitHubWorkflowRunsFetcher
13 | def get(repo_name, branch, page) do
14 | url = build_url(repo_name, branch, page)
15 |
16 | case GitHubClient.get(url) do
17 | {:ok, response} -> %{body: process(response.body, branch)}
18 | {:error, _} = err -> err
19 | end
20 | end
21 |
22 | defp process(body, branch) do
23 | body
24 | |> Jason.decode!()
25 | |> Map.get("workflow_runs")
26 | |> parse_body(branch)
27 | end
28 |
29 | defp parse_body(nil, _), do: []
30 |
31 | defp parse_body(workflow_runs, branch) do
32 | workflow_runs
33 | |> Enum.map(fn workflow_run -> parse_workflow_run(workflow_run, branch) end)
34 | |> remove_nils()
35 | end
36 |
37 | defp parse_workflow_run(%{"head_branch" => branch} = workflow_run, branch) do
38 | workflow_run
39 | |> uniq_sha_for_scheduled_runs()
40 | |> Map.take(@expected_fields)
41 | |> Map.new(fn {k, v} -> {String.to_atom(rename_key(k)), v} end)
42 | |> Map.merge(%{metadata: workflow_run})
43 | end
44 |
45 | defp parse_workflow_run(_workflow_run, _branch), do: nil
46 |
47 | defp uniq_sha_for_scheduled_runs(
48 | %{"event" => "schedule", "head_sha" => head_sha, "id" => external_id} = workflow_run
49 | ) do
50 | workflow_run
51 | |> Map.merge(%{"head_sha" => "#{head_sha}-#{external_id}"})
52 | end
53 |
54 | defp uniq_sha_for_scheduled_runs(workflow_run), do: workflow_run
55 |
56 | defp rename_key("id"), do: "external_id"
57 | defp rename_key(key), do: key
58 |
59 | defp build_url(repo, branch, page) do
60 | "https://api.github.com/repos/#{repo}/actions/runs?per_page=100&branch=#{branch}&page=#{page}"
61 | end
62 |
63 | defp remove_nils(list) do
64 | list |> Enum.reject(&is_nil/1)
65 | end
66 | end
67 |
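One subtlety above: scheduled runs reuse the branch head's commit sha, so `uniq_sha_for_scheduled_runs/1` suffixes the run id to keep such runs distinct when they are later grouped by `head_sha`. Illustrative transformation (the function is private):

    run = %{"event" => "schedule", "id" => 7, "head_sha" => "abc"}
    # uniq_sha_for_scheduled_runs(run) yields %{..., "head_sha" => "abc-7"},
    # so repeated scheduled runs on one commit no longer collapse into a
    # single chart point.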
--------------------------------------------------------------------------------
/test/dashy/charts/workflow_runs_test.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.WorkflowRunsTest do
2 | use Dashy.DataCase
3 |
4 | describe "runs/1" do
5 | test "lists all runs" do
6 | sha = "1"
7 | repo = insert(:repository)
8 | workflow = insert(:workflow, repository: repo)
9 |
10 | insert(:workflow_run,
11 | head_sha: sha,
12 | created_at: ~U[2021-05-16 09:00:00Z],
13 | started_at: ~U[2021-05-16 09:00:00Z],
14 | completed_at: ~U[2021-05-16 10:00:00Z],
15 | status: "completed",
16 | conclusion: "success",
17 | workflow: workflow
18 | )
19 |
20 | insert(:workflow_run,
21 | head_sha: sha,
22 | created_at: ~U[2021-05-16 09:00:00Z],
23 | started_at: ~U[2021-05-16 09:00:00Z],
24 | completed_at: ~U[2021-05-16 11:00:00Z],
25 | status: "completed",
26 | conclusion: "success",
27 | workflow: workflow
28 | )
29 |
30 | assert [%Dashy.Charts.Run{} = fetched_run] = Dashy.Charts.WorkflowRuns.runs(repo)
31 |
32 | # 2 hours
33 | assert fetched_run.minutes == 120
34 | assert fetched_run.seconds == 7200
35 | assert fetched_run.time == ~U[2021-05-16 09:00:00Z]
36 | assert fetched_run.status == "success"
37 | assert fetched_run.link == "https://github.com/#{repo.user}/#{repo.name}/commit/#{sha}"
38 | end
39 |
40 | test "handles the state correctly" do
41 | sha = "1"
42 |
43 | repo = insert(:repository)
44 | workflow = insert(:workflow, repository: repo)
45 |
46 | insert(:workflow_run,
47 | head_sha: sha,
48 | created_at: ~U[2021-05-16 09:00:00Z],
49 | started_at: ~U[2021-05-16 09:00:00Z],
50 | completed_at: ~U[2021-05-16 10:00:00Z],
51 | status: "completed",
52 | conclusion: "success",
53 | workflow: workflow
54 | )
55 |
56 | insert(:workflow_run,
57 | head_sha: sha,
58 | created_at: ~U[2021-05-16 09:00:00Z],
59 | started_at: ~U[2021-05-16 09:00:00Z],
60 | completed_at: ~U[2021-05-16 11:00:00Z],
61 | status: "completed",
62 | conclusion: "failure",
63 | workflow: workflow
64 | )
65 |
66 | assert [%Dashy.Charts.Run{} = fetched_run] = Dashy.Charts.WorkflowRuns.runs(repo)
67 |
68 | assert fetched_run.status == "error"
69 | end
70 | end
71 | end
72 |
--------------------------------------------------------------------------------
/config/prod.exs:
--------------------------------------------------------------------------------
1 | use Mix.Config
2 |
3 | # For production, don't forget to configure the url host
4 | # to something meaningful, Phoenix uses this information
5 | # when generating URLs.
6 | #
7 | # Note we also include the path to a cache manifest
8 | # containing the digested version of static files. This
9 | # manifest is generated by the `mix phx.digest` task,
10 | # which you should run after static files are built and
11 | # before starting your production server.
12 | config :dashy, DashyWeb.Endpoint,
13 | http: [port: {:system, "PORT"}],
14 | url: [
15 | scheme: "https",
16 | host: System.get_env("HOST") || "dashy.codegram.io",
17 | port: 443
18 | ],
19 | force_ssl: [rewrite_on: [:x_forwarded_proto]],
20 | cache_static_manifest: "priv/static/cache_manifest.json"
21 |
22 | # Do not print debug messages in production
23 | config :logger, level: :info
24 |
25 | # ## SSL Support
26 | #
27 | # To get SSL working, you will need to add the `https` key
28 | # to the previous section and set your `:url` port to 443:
29 | #
30 | # config :dashy, DashyWeb.Endpoint,
31 | # ...
32 | # url: [host: "example.com", port: 443],
33 | # https: [
34 | # port: 443,
35 | # cipher_suite: :strong,
36 | # keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
37 | # certfile: System.get_env("SOME_APP_SSL_CERT_PATH"),
38 | # transport_options: [socket_opts: [:inet6]]
39 | # ]
40 | #
41 | # The `cipher_suite` is set to `:strong` to support only the
42 | # latest and more secure SSL ciphers. This means old browsers
43 | # and clients may not be supported. You can set it to
44 | # `:compatible` for wider support.
45 | #
46 | # `:keyfile` and `:certfile` expect an absolute path to the key
47 | # and cert in disk or a relative path inside priv, for example
48 | # "priv/ssl/server.key". For all supported SSL configuration
49 | # options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
50 | #
51 | # We also recommend setting `force_ssl` in your endpoint, ensuring
52 | # no data is ever sent via http, always redirecting to https:
53 | #
54 | # config :dashy, DashyWeb.Endpoint,
55 | # force_ssl: [hsts: true]
56 | #
57 | # Check `Plug.SSL` for all available options in `force_ssl`.
58 |
59 | # Finally import the config/prod.secret.exs which loads secrets
60 | # and configuration from environment variables.
61 | import_config "prod.secret.exs"
62 |
--------------------------------------------------------------------------------
/config/dev.exs:
--------------------------------------------------------------------------------
1 | use Mix.Config
2 |
3 | # Configure your database
4 | config :dashy, Dashy.Repo,
5 | username: "postgres",
6 | password: "postgres",
7 | database: "database",
8 | hostname: "db",
9 | show_sensitive_data_on_connection_error: true,
10 | pool_size: 10
11 |
12 | # For development, we disable any cache and enable
13 | # debugging and code reloading.
14 | #
15 | # The watchers configuration can be used to run external
16 | # watchers to your application. For example, we use it
17 | # with webpack to recompile .js and .css sources.
18 | config :dashy, DashyWeb.Endpoint,
19 | http: [port: 4000],
20 | debug_errors: true,
21 | code_reloader: true,
22 | check_origin: false,
23 | watchers: [
24 | node: [
25 | "node_modules/webpack/bin/webpack.js",
26 | "--mode",
27 | "development",
28 | "--watch",
29 | "--watch-options-stdin",
30 | cd: Path.expand("../assets", __DIR__)
31 | ]
32 | ]
33 |
34 | # ## SSL Support
35 | #
36 | # In order to use HTTPS in development, a self-signed
37 | # certificate can be generated by running the following
38 | # Mix task:
39 | #
40 | # mix phx.gen.cert
41 | #
42 | # Note that this task requires Erlang/OTP 20 or later.
43 | # Run `mix help phx.gen.cert` for more information.
44 | #
45 | # The `http:` config above can be replaced with:
46 | #
47 | # https: [
48 | # port: 4001,
49 | # cipher_suite: :strong,
50 | # keyfile: "priv/cert/selfsigned_key.pem",
51 | # certfile: "priv/cert/selfsigned.pem"
52 | # ],
53 | #
54 | # If desired, both `http:` and `https:` keys can be
55 | # configured to run both http and https servers on
56 | # different ports.
57 |
58 | # Watch static and templates for browser reloading.
59 | config :dashy, DashyWeb.Endpoint,
60 | live_reload: [
61 | patterns: [
62 | ~r"priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$",
63 | ~r"priv/gettext/.*(po)$",
64 | ~r"lib/dashy_web/(live|views)/.*(ex)$",
65 | ~r"lib/dashy_web/templates/.*(eex)$",
66 | ~r"lib/dashy_web/live/.*(sface)$"
67 | ]
68 | ]
69 |
70 | # Do not include metadata nor timestamps in development logs
71 | config :logger, :console, format: "[$level] $message\n"
72 |
73 | # Set a higher stacktrace during development. Avoid configuring such
74 | # in production as building large stacktraces may be expensive.
75 | config :phoenix, :stacktrace_depth, 20
76 |
77 | # Initialize plugs at runtime for faster development compilation
78 | config :phoenix, :plug_init_mode, :runtime
79 |
80 | import_config "dev.local.exs"
81 |
--------------------------------------------------------------------------------
/mix.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.MixProject do
2 | use Mix.Project
3 |
4 | def project do
5 | [
6 | app: :dashy,
7 | version: "0.1.0",
8 | elixir: "~> 1.7",
9 | elixirc_paths: elixirc_paths(Mix.env()),
10 | compilers: [:phoenix, :gettext] ++ Mix.compilers(),
11 | start_permanent: Mix.env() == :prod,
12 | aliases: aliases(),
13 | deps: deps()
14 | ]
15 | end
16 |
17 | # Configuration for the OTP application.
18 | #
19 | # Type `mix help compile.app` for more information.
20 | def application do
21 | [
22 | mod: {Dashy.Application, []},
23 | extra_applications: [:logger, :runtime_tools]
24 | ]
25 | end
26 |
27 | # Specifies which paths to compile per environment.
28 | defp elixirc_paths(:test), do: ["lib", "test/support"]
29 | defp elixirc_paths(_), do: ["lib"]
30 |
31 | # Specifies your project dependencies.
32 | #
33 | # Type `mix help deps` for examples and options.
34 | defp deps do
35 | [
36 | {:phoenix, "~> 1.5.7"},
37 | {:phoenix_ecto, "~> 4.1"},
38 | {:ecto_sql, "~> 3.4"},
39 | {:postgrex, ">= 0.0.0"},
40 | {:phoenix_live_view, "~> 0.15.0"},
41 | {:floki, ">= 0.27.0", only: :test},
42 | {:phoenix_html, "~> 2.11"},
43 | {:phoenix_live_reload, "~> 1.2", only: :dev},
44 | {:phoenix_live_dashboard, "~> 0.4"},
45 | {:telemetry_metrics, "~> 0.4"},
46 | {:telemetry_poller, "~> 0.4"},
47 | {:gettext, "~> 0.11"},
48 | {:credo, "~> 1.5", only: [:dev, :test], runtime: false},
49 | {:jason, "~> 1.0"},
50 | {:plug_cowboy, "~> 2.0"},
51 | {:mix_test_watch, "~> 1.0", only: :dev, runtime: false},
52 | {:ex_doc, "~> 0.23", only: :dev, runtime: false},
53 | {:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false},
54 | {:httpoison, "~> 1.8"},
55 | {:ex_machina, "~> 2.7.0"},
56 | {:surface, "~> 0.4.0"}
57 | ]
58 | end
59 |
60 | # Aliases are shortcuts or tasks specific to the current project.
61 | # For example, to install project dependencies and perform other setup tasks, run:
62 | #
63 | # $ mix setup
64 | #
65 | # See the documentation for `Mix` for more info on aliases.
66 | defp aliases do
67 | [
68 | setup: ["deps.get", "ecto.setup", "cmd npm install --prefix assets"],
69 | "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
70 | "ecto.reset": ["ecto.drop", "ecto.setup"],
71 | test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"]
72 | ]
73 | end
74 | end
75 |
--------------------------------------------------------------------------------
/priv/gettext/en/LC_MESSAGES/errors.po:
--------------------------------------------------------------------------------
1 | ## `msgid`s in this file come from POT (.pot) files.
2 | ##
3 | ## Do not add, change, or remove `msgid`s manually here as
4 | ## they're tied to the ones in the corresponding POT file
5 | ## (with the same domain).
6 | ##
7 | ## Use `mix gettext.extract --merge` or `mix gettext.merge`
8 | ## to merge POT files into PO files.
9 | msgid ""
10 | msgstr ""
11 | "Language: en\n"
12 |
13 | ## From Ecto.Changeset.cast/4
14 | msgid "can't be blank"
15 | msgstr ""
16 |
17 | ## From Ecto.Changeset.unique_constraint/3
18 | msgid "has already been taken"
19 | msgstr ""
20 |
21 | ## From Ecto.Changeset.put_change/3
22 | msgid "is invalid"
23 | msgstr ""
24 |
25 | ## From Ecto.Changeset.validate_acceptance/3
26 | msgid "must be accepted"
27 | msgstr ""
28 |
29 | ## From Ecto.Changeset.validate_format/3
30 | msgid "has invalid format"
31 | msgstr ""
32 |
33 | ## From Ecto.Changeset.validate_subset/3
34 | msgid "has an invalid entry"
35 | msgstr ""
36 |
37 | ## From Ecto.Changeset.validate_exclusion/3
38 | msgid "is reserved"
39 | msgstr ""
40 |
41 | ## From Ecto.Changeset.validate_confirmation/3
42 | msgid "does not match confirmation"
43 | msgstr ""
44 |
45 | ## From Ecto.Changeset.no_assoc_constraint/3
46 | msgid "is still associated with this entry"
47 | msgstr ""
48 |
49 | msgid "are still associated with this entry"
50 | msgstr ""
51 |
52 | ## From Ecto.Changeset.validate_length/3
53 | msgid "should be %{count} character(s)"
54 | msgid_plural "should be %{count} character(s)"
55 | msgstr[0] ""
56 | msgstr[1] ""
57 |
58 | msgid "should have %{count} item(s)"
59 | msgid_plural "should have %{count} item(s)"
60 | msgstr[0] ""
61 | msgstr[1] ""
62 |
63 | msgid "should be at least %{count} character(s)"
64 | msgid_plural "should be at least %{count} character(s)"
65 | msgstr[0] ""
66 | msgstr[1] ""
67 |
68 | msgid "should have at least %{count} item(s)"
69 | msgid_plural "should have at least %{count} item(s)"
70 | msgstr[0] ""
71 | msgstr[1] ""
72 |
73 | msgid "should be at most %{count} character(s)"
74 | msgid_plural "should be at most %{count} character(s)"
75 | msgstr[0] ""
76 | msgstr[1] ""
77 |
78 | msgid "should have at most %{count} item(s)"
79 | msgid_plural "should have at most %{count} item(s)"
80 | msgstr[0] ""
81 | msgstr[1] ""
82 |
83 | ## From Ecto.Changeset.validate_number/3
84 | msgid "must be less than %{number}"
85 | msgstr ""
86 |
87 | msgid "must be greater than %{number}"
88 | msgstr ""
89 |
90 | msgid "must be less than or equal to %{number}"
91 | msgstr ""
92 |
93 | msgid "must be greater than or equal to %{number}"
94 | msgstr ""
95 |
96 | msgid "must be equal to %{number}"
97 | msgstr ""
98 |
--------------------------------------------------------------------------------
/priv/gettext/errors.pot:
--------------------------------------------------------------------------------
1 | ## This is a PO Template file.
2 | ##
3 | ## `msgid`s here are often extracted from source code.
4 | ## Add new translations manually only if they're dynamic
5 | ## translations that can't be statically extracted.
6 | ##
7 | ## Run `mix gettext.extract` to bring this file up to
8 | ## date. Leave `msgstr`s empty as changing them here has no
9 | ## effect: edit them in PO (`.po`) files instead.
10 |
11 | ## From Ecto.Changeset.cast/4
12 | msgid "can't be blank"
13 | msgstr ""
14 |
15 | ## From Ecto.Changeset.unique_constraint/3
16 | msgid "has already been taken"
17 | msgstr ""
18 |
19 | ## From Ecto.Changeset.put_change/3
20 | msgid "is invalid"
21 | msgstr ""
22 |
23 | ## From Ecto.Changeset.validate_acceptance/3
24 | msgid "must be accepted"
25 | msgstr ""
26 |
27 | ## From Ecto.Changeset.validate_format/3
28 | msgid "has invalid format"
29 | msgstr ""
30 |
31 | ## From Ecto.Changeset.validate_subset/3
32 | msgid "has an invalid entry"
33 | msgstr ""
34 |
35 | ## From Ecto.Changeset.validate_exclusion/3
36 | msgid "is reserved"
37 | msgstr ""
38 |
39 | ## From Ecto.Changeset.validate_confirmation/3
40 | msgid "does not match confirmation"
41 | msgstr ""
42 |
43 | ## From Ecto.Changeset.no_assoc_constraint/3
44 | msgid "is still associated with this entry"
45 | msgstr ""
46 |
47 | msgid "are still associated with this entry"
48 | msgstr ""
49 |
50 | ## From Ecto.Changeset.validate_length/3
51 | msgid "should be %{count} character(s)"
52 | msgid_plural "should be %{count} character(s)"
53 | msgstr[0] ""
54 | msgstr[1] ""
55 |
56 | msgid "should have %{count} item(s)"
57 | msgid_plural "should have %{count} item(s)"
58 | msgstr[0] ""
59 | msgstr[1] ""
60 |
61 | msgid "should be at least %{count} character(s)"
62 | msgid_plural "should be at least %{count} character(s)"
63 | msgstr[0] ""
64 | msgstr[1] ""
65 |
66 | msgid "should have at least %{count} item(s)"
67 | msgid_plural "should have at least %{count} item(s)"
68 | msgstr[0] ""
69 | msgstr[1] ""
70 |
71 | msgid "should be at most %{count} character(s)"
72 | msgid_plural "should be at most %{count} character(s)"
73 | msgstr[0] ""
74 | msgstr[1] ""
75 |
76 | msgid "should have at most %{count} item(s)"
77 | msgid_plural "should have at most %{count} item(s)"
78 | msgstr[0] ""
79 | msgstr[1] ""
80 |
81 | ## From Ecto.Changeset.validate_number/3
82 | msgid "must be less than %{number}"
83 | msgstr ""
84 |
85 | msgid "must be greater than %{number}"
86 | msgstr ""
87 |
88 | msgid "must be less than or equal to %{number}"
89 | msgstr ""
90 |
91 | msgid "must be greater than or equal to %{number}"
92 | msgstr ""
93 |
94 | msgid "must be equal to %{number}"
95 | msgstr ""
96 |
--------------------------------------------------------------------------------
/test/dashy/fetcher_test.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.FetcherTest do
2 | use Dashy.DataCase
3 |
4 | alias Dashy.Fetcher
5 | alias Dashy.Repo
6 | alias Dashy.Workflows.Workflow
7 | alias Dashy.WorkflowRuns.WorkflowRun
8 | alias Dashy.WorkflowRunJobs.WorkflowRunJob
9 |
10 | describe "update_workflows/2" do
11 | test "fetches workflows from the API and saves them to the DB" do
12 | assert [] == Repo.all(Workflow)
13 | repo = insert(:repository)
14 |
15 | Fetcher.update_workflows(repo, with: Dashy.TestFetchers.WorkflowsFetcher)
16 |
17 | assert 3 == Repo.all(Workflow) |> Enum.count()
18 | end
19 |
20 | test "handles errors" do
21 | repo = insert(:repository)
22 |
23 | assert [{:fetch_error, "whoops in #{repo.user}/#{repo.name}"}] ==
24 | Fetcher.update_workflows(repo, with: Dashy.TestFetchers.ErroredFetcher)
25 | end
26 | end
27 |
28 | describe "update_workflow_runs/2" do
29 | test "fetches workflow_runs from the API and saves them to the DB" do
30 | assert [] == Repo.all(WorkflowRun)
31 | repo = insert(:repository)
32 |
33 | Fetcher.update_workflow_runs(repo,
34 | with: Dashy.TestFetchers.WorkflowRunsFetcher
35 | )
36 |
37 | [workflow_run | _] = workflow_runs = Repo.all(WorkflowRun)
38 | assert 2 == workflow_runs |> Enum.count()
39 |
40 | assert %Workflow{} = workflow_run |> Repo.preload(:workflow) |> Map.get(:workflow)
41 | assert %{"foo" => 1} = workflow_run.metadata
42 | assert workflow_run.head_sha
43 | end
44 |
45 | test "handles errors" do
46 | repo = insert(:repository)
47 |
48 | assert [{:fetch_error, "whoops in 1 of #{repo.user}/#{repo.name}, branch #{repo.branch}"}] ==
49 | Fetcher.update_workflow_runs(repo,
50 | with: Dashy.TestFetchers.ErroredFetcher
51 | )
52 | end
53 | end
54 |
55 | describe "update_workflows_run_jobs/2" do
56 | test "fetches workflow run jobs from the API and saves them to the DB" do
57 | assert [] == Repo.all(WorkflowRunJob)
58 | repo = insert(:repository)
59 | workflow_run = insert(:workflow_run)
60 |
61 | Fetcher.update_workflow_run_jobs(repo, workflow_run,
62 | with: Dashy.TestFetchers.WorkflowRunJobsFetcher
63 | )
64 |
65 | assert 2 == Repo.all(WorkflowRunJob) |> Enum.count()
66 | end
67 |
68 | test "handles errors" do
69 | repo = insert(:repository)
70 |
71 | assert [{:fetch_error, "whoops in #{repo.user}/#{repo.name}"}] ==
72 | Fetcher.update_workflows(repo, with: Dashy.TestFetchers.ErroredFetcher)
73 | end
74 | end
75 | end
76 |
--------------------------------------------------------------------------------
/lib/dashy_web.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb do
2 | @moduledoc """
3 | The entrypoint for defining your web interface, such
4 | as controllers, views, channels and so on.
5 |
6 | This can be used in your application as:
7 |
8 | use DashyWeb, :controller
9 | use DashyWeb, :view
10 |
11 | The definitions below will be executed for every view,
12 | controller, etc, so keep them short and clean, focused
13 | on imports, uses and aliases.
14 |
15 | Do NOT define functions inside the quoted expressions
16 | below. Instead, define any helper function in modules
17 | and import those modules here.
18 | """
19 |
20 | def controller do
21 | quote do
22 | use Phoenix.Controller, namespace: DashyWeb
23 |
24 | import Plug.Conn
25 | import DashyWeb.Gettext
26 | alias DashyWeb.Router.Helpers, as: Routes
27 | end
28 | end
29 |
30 | def view do
31 | quote do
32 | use Phoenix.View,
33 | root: "lib/dashy_web/templates",
34 | namespace: DashyWeb
35 |
36 | # Import convenience functions from controllers
37 | import Phoenix.Controller,
38 | only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
39 |
40 | # Include shared imports and aliases for views
41 | unquote(view_helpers())
42 | end
43 | end
44 |
45 | def live_view do
46 | quote do
47 | use Phoenix.LiveView,
48 | layout: {DashyWeb.LayoutView, "live.html"}
49 |
50 | unquote(view_helpers())
51 | end
52 | end
53 |
54 | def live_component do
55 | quote do
56 | use Phoenix.LiveComponent
57 |
58 | unquote(view_helpers())
59 | end
60 | end
61 |
62 | def router do
63 | quote do
64 | use Phoenix.Router
65 |
66 | import Plug.Conn
67 | import Phoenix.Controller
68 | import Phoenix.LiveView.Router
69 | end
70 | end
71 |
72 | def channel do
73 | quote do
74 | use Phoenix.Channel
75 | import DashyWeb.Gettext
76 | end
77 | end
78 |
79 | defp view_helpers do
80 | quote do
81 | # Use all HTML functionality (forms, tags, etc)
82 | use Phoenix.HTML
83 |
84 | # Import LiveView helpers (live_render, live_component, live_patch, etc)
85 | import Phoenix.LiveView.Helpers
86 |
87 | # Import basic rendering functionality (render, render_layout, etc)
88 | import Phoenix.View
89 |
90 | import DashyWeb.ErrorHelpers
91 | import DashyWeb.Gettext
92 | alias DashyWeb.Router.Helpers, as: Routes
93 | end
94 | end
95 |
96 | @doc """
97 | When used, dispatch to the appropriate controller/view/etc.
98 | """
99 | defmacro __using__(which) when is_atom(which) do
100 | apply(__MODULE__, which, [])
101 | end
102 | end
103 |
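A usage sketch (module name hypothetical): a new page defined through this
entrypoint starts like

    defmodule DashyWeb.ExampleLive do
      use DashyWeb, :live_view

      @impl true
      def mount(_params, _session, socket), do: {:ok, socket}

      @impl true
      def render(assigns), do: ~L"<div>hello</div>"
    end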
--------------------------------------------------------------------------------
/lib/dashy/charts/workflow_parts.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.WorkflowParts do
2 | alias Dashy.Charts.Part
3 | alias Dashy.Workflows.Workflow
4 | alias Dashy.WorkflowRuns.WorkflowRun
5 |
6 | alias Dashy.Repo
7 | import Ecto.Query
8 |
9 | alias Dashy.Charts.Helpers
10 |
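   | # Builds the dataset for the "parts" chart: groups the last `days`
   | # (default 30) of runs by part name, sorts the parts by the duration
   | # of their most recent run, and assigns each part a color.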
11 | def parts(repo, opts \\ []) do
12 | last =
13 | from(
14 | r in WorkflowRun,
15 | join: w in Workflow,
16 | on: r.workflow_id == w.external_id,
17 | where: w.repository_id == ^repo.id,
18 | order_by: [desc: r.created_at],
19 | limit: 1
20 | )
21 | |> Repo.one()
22 |
23 | days = Keyword.get(opts, :days, 30)
24 | minimum_start_time = last.created_at |> DateTime.add(-days * 60 * 60 * 24, :second)
25 |
26 | grouped_runs =
27 | from(
28 | r in WorkflowRun,
29 | join: w in Workflow,
30 | on: r.workflow_id == w.external_id,
31 | select: %{
32 | name: r.name,
33 | started_at: r.started_at,
34 | completed_at: r.completed_at,
35 | external_id: r.external_id
36 | },
37 | where: not is_nil(r.started_at),
38 | where: not is_nil(r.completed_at),
39 | where: w.repository_id == ^repo.id,
40 | where: r.created_at > ^minimum_start_time,
41 | order_by: [r.workflow_id, r.created_at]
42 | )
43 | |> Repo.all()
44 | |> Enum.group_by(fn data -> data.name end)
45 | |> Enum.to_list()
46 | |> Enum.sort_by(fn {_, runs} ->
47 | recent = runs |> List.last()
48 | -calculate_seconds(recent.completed_at, recent.started_at)
49 | end)
50 |
51 | parts =
52 | grouped_runs
53 | |> Enum.map(fn {name, _} -> name end)
54 |
55 | colors = Helpers.generate_colors(parts |> Enum.count())
56 |
57 | %{data: build_data(grouped_runs, colors, repo), colors: colors, parts: parts}
58 | end
59 |
60 | def build_data([], [], _repo), do: []
61 |
62 | def build_data([{part_name, runs} | grouped_runs], [color | colors], repo) do
63 | [
64 | %{
65 | label: part_name,
66 | color: Helpers.build_style_color(color),
67 | data:
68 | runs
69 | |> Enum.map(fn run ->
70 | seconds = calculate_seconds(run.completed_at, run.started_at)
71 |
72 | %Part{
73 | name: part_name,
74 | time: run.started_at,
75 | seconds: seconds,
76 | minutes: seconds / 60,
77 | link: link_for(repo, run)
78 | }
79 | end)
80 | }
81 | | build_data(grouped_runs, colors, repo)
82 | ]
83 | end
84 |
85 | defp calculate_seconds(nil, _), do: 0
86 | defp calculate_seconds(_, nil), do: 0
87 | defp calculate_seconds(a, b), do: DateTime.diff(a, b)
88 |
89 | defp link_for(%{user: user, name: name}, %{external_id: id}),
90 | do: "https://github.com/#{user}/#{name}/actions/runs/#{id}"
91 | end
92 |
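A minimal sketch of driving this module from iex; the user/name values are
hypothetical, and the result shape follows the map built in `parts/2` above:

    repo = Dashy.Repositories.get_repository_by_user_and_name("decidim", "decidim")
    %{data: data, colors: colors, parts: part_names} =
      Dashy.Charts.WorkflowParts.parts(repo, days: 7)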
--------------------------------------------------------------------------------
/test/dashy/repositories_test.exs:
--------------------------------------------------------------------------------
1 | defmodule Dashy.RepositoriesTest do
2 | use Dashy.DataCase
3 |
4 | alias Dashy.Repositories
5 |
6 | describe "repositories" do
7 | alias Dashy.Repositories.Repository
8 |
9 | @valid_attrs %{name: "some_name", user: "some_user", branch: "some_branch"}
10 | @update_attrs %{
11 | name: "some_updated_name",
12 | user: "some_updated_user",
13 | branch: "some_updated_branch"
14 | }
15 | @invalid_attrs %{name: nil, user: nil, branch: nil}
16 |
17 | def repository_fixture(attrs \\ %{}) do
18 | {:ok, repository} =
19 | attrs
20 | |> Enum.into(@valid_attrs)
21 | |> Repositories.create_repository()
22 |
23 | repository
24 | end
25 |
26 | test "list_repositories/0 returns all repositories" do
27 | repository = repository_fixture()
28 | assert Repositories.list_repositories() == [repository]
29 | end
30 |
31 | test "get_repository!/1 returns the repository with given id" do
32 | repository = repository_fixture()
33 | assert Repositories.get_repository!(repository.id) == repository
34 | end
35 |
36 | test "create_repository/1 with valid data creates a repository" do
37 | assert {:ok, %Repository{} = repository} = Repositories.create_repository(@valid_attrs)
38 |
39 | assert repository.name == "some_name"
40 | assert repository.user == "some_user"
41 | assert repository.branch == "some_branch"
42 | end
43 |
44 | test "create_repository/1 with invalid data returns error changeset" do
45 | assert {:error, %Ecto.Changeset{}} = Repositories.create_repository(@invalid_attrs)
46 | end
47 |
48 | test "update_repository/2 with valid data updates the repository" do
49 | repository = repository_fixture()
50 |
51 | assert {:ok, %Repository{} = repository} =
52 | Repositories.update_repository(repository, @update_attrs)
53 |
54 | assert repository.name == "some_updated_name"
55 | assert repository.user == "some_updated_user"
56 | assert repository.branch == "some_updated_branch"
57 | end
58 |
59 | test "update_repository/2 with invalid data returns error changeset" do
60 | repository = repository_fixture()
61 |
62 | assert {:error, %Ecto.Changeset{}} =
63 | Repositories.update_repository(repository, @invalid_attrs)
64 |
65 | assert repository == Repositories.get_repository!(repository.id)
66 | end
67 |
68 | test "delete_repository/1 deletes the repository" do
69 | repository = repository_fixture()
70 | assert {:ok, %Repository{}} = Repositories.delete_repository(repository)
71 | assert_raise Ecto.NoResultsError, fn -> Repositories.get_repository!(repository.id) end
72 | end
73 |
74 | test "change_repository/1 returns a repository changeset" do
75 | repository = repository_fixture()
76 | assert %Ecto.Changeset{} = Repositories.change_repository(repository)
77 | end
78 | end
79 | end
80 |
--------------------------------------------------------------------------------
/lib/dashy/repositories.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Repositories do
2 | @moduledoc """
3 | The Repositories context.
4 | """
5 |
6 | import Ecto.Query, warn: false
7 | alias Dashy.Repo
8 |
9 | alias Dashy.Repositories.Repository
10 |
11 | @doc """
12 | Returns the list of repositories.
13 |
14 | ## Examples
15 |
16 | iex> list_repositories()
17 | [%Repository{}, ...]
18 |
19 | """
20 | def list_repositories do
21 | Repo.all(Repository)
22 | end
23 |
24 | @doc """
25 | Gets a single repository.
26 |
27 | Raises `Ecto.NoResultsError` if the Repository does not exist.
28 |
29 | ## Examples
30 |
31 | iex> get_repository!(123)
32 | %Repository{}
33 |
34 | iex> get_repository!(456)
35 | ** (Ecto.NoResultsError)
36 |
37 | """
38 | def get_repository!(id), do: Repo.get!(Repository, id)
39 |
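   | # Case-sensitive substring search on the repository name, via a
   | # parameterized SQL LIKE pattern.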
40 | def get_repositories_by_name(name) when is_binary(name) do
41 | name = "%#{name}%"
42 |
43 | from(
44 | r in Repository,
45 | where: like(r.name, ^name)
46 | )
47 | |> Repo.all()
48 | end
49 |
50 | def get_repository_by_user_and_name(user, name) do
51 | from(
52 | r in Repository,
53 | where: r.user == ^user,
54 | where: r.name == ^name
55 | )
56 | |> Repo.one()
57 | end
58 |
59 | @doc """
60 | Creates a repository.
61 |
62 | ## Examples
63 |
64 | iex> create_repository(%{field: value})
65 | {:ok, %Repository{}}
66 |
67 | iex> create_repository(%{field: bad_value})
68 | {:error, %Ecto.Changeset{}}
69 |
70 | """
71 | def create_repository(attrs \\ %{}) do
72 | %Repository{}
73 | |> Repository.changeset(attrs)
74 | |> Repo.insert()
75 | end
76 |
77 | @doc """
78 | Updates a repository.
79 |
80 | ## Examples
81 |
82 | iex> update_repository(repository, %{field: new_value})
83 | {:ok, %Repository{}}
84 |
85 | iex> update_repository(repository, %{field: bad_value})
86 | {:error, %Ecto.Changeset{}}
87 |
88 | """
89 | def update_repository(%Repository{} = repository, attrs) do
90 | repository
91 | |> Repository.changeset(attrs)
92 | |> Repo.update()
93 | end
94 |
95 | @doc """
96 | Deletes a repository.
97 |
98 | ## Examples
99 |
100 | iex> delete_repository(repository)
101 | {:ok, %Repository{}}
102 |
103 | iex> delete_repository(repository)
104 | {:error, %Ecto.Changeset{}}
105 |
106 | """
107 | def delete_repository(%Repository{} = repository) do
108 | Repo.delete(repository)
109 | end
110 |
111 | @doc """
112 | Returns an `%Ecto.Changeset{}` for tracking repository changes.
113 |
114 | ## Examples
115 |
116 | iex> change_repository(repository)
117 | %Ecto.Changeset{data: %Repository{}}
118 |
119 | """
120 | def change_repository(%Repository{} = repository, attrs \\ %{}) do
121 | Repository.changeset(repository, attrs)
122 | end
123 | end
124 |
--------------------------------------------------------------------------------
/lib/dashy/charts/workflow_parts_fake.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Charts.WorkflowPartsFake do
2 | alias Dashy.Charts.Helpers
3 |
4 | def parts(_repo, opts \\ []) do
5 | count = Keyword.get(opts, :count, 50)
6 | fake_parts(DateTime.now!("Etc/UTC"), [], count)
7 | end
8 |
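   | # Walks backwards in time generating `count` fake run timestamps,
   | # then expands them into per-part series whose durations follow a
   | # random walk around a normally-distributed baseline.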
9 | defp fake_parts(_last_run_date, times, 0) do
10 | parts = list_parts()
11 | colors = Helpers.generate_colors(parts |> Enum.count())
12 | %{data: fake_parts_data(times, parts, colors, []), colors: colors, parts: parts}
13 | end
14 |
15 | defp fake_parts(last_run_date, data, n) do
16 | time = DateTime.add(last_run_date, -:rand.uniform(12 * 60 * 60), :second)
17 |
18 | fake_parts(time, [time | data], n - 1)
19 | end
20 |
21 | defp fake_parts_data(_times, [], _colors, data), do: data
22 |
23 | defp fake_parts_data(times, [part | parts], [color | colors], data) do
24 | initial = %{
25 | seconds: :rand.normal(120, 30) |> Float.round(),
26 | data: []
27 | }
28 |
29 | %{data: part_data} =
30 | times
31 | |> Enum.reduce(initial, fn time, acc ->
32 | seconds = acc.seconds + :rand.normal(0, 5)
33 | part_time = DateTime.add(time, :rand.uniform(60), :second)
34 |
35 | %{
36 | seconds: seconds,
37 | data: [
38 | %{
39 | name: part,
40 | time: part_time,
41 | seconds: seconds,
42 | minutes: seconds / 60,
43 | link: "https://github.com/decidim/decidim/actions/runs/834489205"
44 | }
45 | | acc.data
46 | ]
47 | }
48 | end)
49 |
50 | part_data = build_part(part, part_data, color)
51 |
52 | fake_parts_data(times, parts, colors, [part_data | data])
53 | end
54 |
55 | def build_part(part, part_data, color) do
56 | %{
57 | label: part,
58 | data: part_data,
59 | color: Helpers.build_style_color(color)
60 | }
61 | end
62 |
63 | def list_parts do
64 | [
65 | "[CI] Accountability",
66 | "[CI] Admin",
67 | "[CI] Api",
68 | "[CI] Assemblies",
69 | "[CI] Blogs",
70 | "[CI] Budgets",
71 | "[CI] Comments",
72 | "[CI] Conferences",
73 | "[CI] Consultations",
74 | "[CI] Core",
75 | "[CI] Core (system specs)",
76 | "[CI] Core (unit tests)",
77 | "[CI] Debates",
78 | "[CI] Dev (system specs)",
79 | "[CI] Elections",
80 | "[CI] Elections (system admin)",
81 | "[CI] Elections (system public)",
82 | "[CI] Elections (unit tests)",
83 | "[CI] Forms",
84 | "[CI] Generators",
85 | "[CI] Initiatives",
86 | "[CI] Lint",
87 | "[CI] Main folder",
88 | "[CI] Meetings",
89 | "[CI] Meetings (system admin)",
90 | "[CI] Meetings (system public)",
91 | "[CI] Meetings (unit tests)",
92 | "[CI] Pages",
93 | "[CI] Participatory processes",
94 | "[CI] Proposals (system admin)",
95 | "[CI] Proposals (system public 1)",
96 | "[CI] Proposals (system public)",
97 | "[CI] Proposals (system public2)",
98 | "[CI] Proposals (unit tests)",
99 | "[CI] Security",
100 | "[CI] Sortitions",
101 | "[CI] Surveys",
102 | "[CI] System",
103 | "[CI] Templates",
104 | "[CI] Test",
105 | "[CI] Verifications"
106 | ]
107 | end
108 | end
109 |
--------------------------------------------------------------------------------
/lib/dashy/fetcher.ex:
--------------------------------------------------------------------------------
1 | defmodule Dashy.Fetcher do
2 | alias Dashy.Workflows
3 | alias Dashy.WorkflowRuns
4 | alias Dashy.WorkflowRunJobs
5 | alias Dashy.Fetchers.WorkflowsFetcher
6 | alias Dashy.Fetchers.WorkflowRunsFetcher
7 | alias Dashy.Fetchers.WorkflowRunJobsFetcher
8 |
9 | import Ecto.Query
10 |
11 | alias Dashy.Repo
12 |
13 | @starting_page 1
14 | @minimum_results_number 300
15 |
16 | def update_workflows(repo, opts \\ []) do
17 | fetcher_module = Keyword.get(opts, :with, WorkflowsFetcher)
18 | save_function = &Workflows.create_or_update/1
19 | repo_name = repo_name(repo)
20 | attrs = %{repository_id: repo.id}
21 |
22 | save_results(fetcher_module.get(repo_name), save_function, attrs)
23 | end
24 |
25 | def update_workflow_runs(repo, opts \\ []) do
26 | fetcher_module = Keyword.get(opts, :with, WorkflowRunsFetcher)
27 | save_function = &WorkflowRuns.create_or_update/1
28 | repo_name = repo_name(repo)
29 |
30 | fetch_workflow_runs_and_save(
31 | repo_name,
32 | fetcher_module,
33 | save_function,
34 | %{branch: repo.branch, page: @starting_page},
35 | @minimum_results_number
36 | )
37 | end
38 |
39 | def update_all_workflow_run_jobs(repo) do
40 | repo_name = repo_name(repo)
41 |
42 | from(
43 | r in WorkflowRuns.WorkflowRun,
44 | select: [:external_id]
45 | )
46 | |> Repo.all()
47 | |> Enum.each(fn workflow_run ->
48 | Dashy.Fetcher.update_workflow_run_jobs(repo_name, workflow_run)
49 | end)
50 | end
51 |
52 | def update_workflow_run_jobs(repo_name, workflow_run, opts \\ []) do
53 | fetcher_module = Keyword.get(opts, :with, WorkflowRunJobsFetcher)
54 | save_function = &WorkflowRunJobs.create_or_update/1
55 |
56 | save_results(fetcher_module.get(repo_name, workflow_run.external_id), save_function)
57 | WorkflowRuns.update_from_jobs(workflow_run.external_id)
58 | end
59 |
60 | defp save_results(results, save_function, attrs \\ %{}) do
61 | case results do
62 | {:error, error} ->
63 | [{:fetch_error, error}]
64 |
65 | %{body: results} ->
66 | results
67 | |> Enum.map(fn result ->
68 | save_function.(Map.merge(result, attrs))
69 | end)
70 | end
71 | end
72 |
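   | # Pages through the runs endpoint one page at a time, saving results
   | # as it goes, until at least @minimum_results_number runs have been
   | # fetched or an empty page / fetch error stops the recursion.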
73 | defp fetch_workflow_runs_and_save(
74 | _repo_name,
75 | _fetcher_module,
76 | _save_function,
77 | _opts,
78 | minimum
79 | )
80 | when minimum <= 0,
81 | do: []
82 |
83 | defp fetch_workflow_runs_and_save(
84 | repo_name,
85 | fetcher_module,
86 | save_function,
87 | %{branch: branch, page: page},
88 | minimum
89 | ) do
90 | results = fetcher_module.get(repo_name, branch, page)
91 |
92 | runs = save_results(results, save_function)
93 |
94 | case runs do
95 | [{:fetch_error, _}] ->
96 | runs
97 |
98 | [] ->
99 | []
100 |
101 | _ ->
102 | runs ++
103 | fetch_workflow_runs_and_save(
104 | repo_name,
105 | fetcher_module,
106 | save_function,
107 | %{branch: branch, page: page + 1},
108 | minimum - (results.body |> Enum.count())
109 | )
110 | end
111 | end
112 |
113 | defp repo_name(repo), do: "#{repo.user}/#{repo.name}"
114 | end
115 |
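A sketch of the full fetch pipeline for one repository (repo values
hypothetical); the `with:` option is the seam the tests use to swap in
stub fetchers:

    repo = Dashy.Repositories.get_repository_by_user_and_name("decidim", "decidim")
    Dashy.Fetcher.update_workflows(repo)
    Dashy.Fetcher.update_workflow_runs(repo)
    Dashy.Fetcher.update_all_workflow_run_jobs(repo)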
--------------------------------------------------------------------------------
/infra/index.ts:
--------------------------------------------------------------------------------
1 | import { k8s, pulumi, docker, gcp } from "@codegram/pulumi-utils";
2 |
3 | const host = "dashy.codegram.io";
4 |
5 | /**
6 | * Get a reference to the stack that was used to create
7 | * the genesis Kubernetes cluster. In order to make it work you need to add
8 | * the `clusterStackRef` config value like this:
9 | *
10 | * $ pulumi config set clusterStackRef codegram/genesis-cluster/prod
11 | */
12 | const stackReference = pulumi.getStackReference({
13 | name: pulumi.getValueFromConfig("clusterStackRef"),
14 | });
15 |
16 | /**
17 | * Create a Kubernetes provider that will be used by all resources. This function
18 | * uses the previous `stackReference` outputs to create the provider for the
19 | * Genesis Kubernetes cluster.
20 | */
21 | const kubernetesProvider = k8s.buildProvider({
22 | name: "dashy",
23 | kubeconfig: stackReference.requireOutput("kubeconfig"),
24 | namespace: stackReference.requireOutput("appsNamespaceName"),
25 | });
26 |
27 | const passwordDb = pulumi.createRandomPassword({ name: "dashy-db" });
28 | const { database, user } = gcp.createDatabase({
29 | name: "dashy",
30 | username: "dashy",
31 | password: passwordDb as any,
32 | });
33 |
34 | const databaseUrl = pulumi.interpolate`postgresql://${user.name}:${user.password}@${database.publicIpAddress}:5432/dashy`;
35 | const secretKeyBase = pulumi.createRandomPassword({
36 | name: "dashy-secret-key-base",
37 | length: 128,
38 | });
39 |
40 | const githubToken = pulumi.getSecretFromConfig("githubToken");
41 | const env = [
42 | { name: "DATABASE_URL", value: databaseUrl },
43 | { name: "SECRET_KEY_BASE", value: secretKeyBase },
44 | { name: "GITHUB_TOKEN", value: githubToken },
45 | { name: "HOST", value: host },
46 | ];
47 |
48 | /**
49 | * Create a new docker image. Use the `context` option to specify where
50 |  * the `Dockerfile` is located.
51 | *
52 | * NOTE: to make this code work you need to add the following config value:
53 | *
54 | * $ pulumi config set gcpProjectId labs-260007
55 | *
56 | * The reason for that is we are pushing the docker images to Google cloud right now.
57 | */
58 | const dockerImage = docker.buildImage({
59 | name: "dashy",
60 | context: "..",
61 | args: {
62 | databaseUrl,
63 | secretKeyBase,
64 | githubToken,
65 | host,
66 | },
67 | });
68 |
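   | // Run one-off Kubernetes Jobs to create and migrate the database
   | // before the app deploys; the dependsOn chain enforces the ordering
   | // (create -> migrate -> application).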
69 | const createJob = k8s.createJob({
70 | name: "dashy-create-db",
71 | command: ["bin/dashy"],
72 | containerArgs: ["eval", '"Dashy.Release.db_create"'],
73 | env,
74 | provider: kubernetesProvider,
75 | dockerImageName: dockerImage.imageName,
76 | });
77 |
78 | const migrateJob = k8s.createJob({
79 | name: "dashy-migrate-db",
80 | command: ["bin/dashy"],
81 | containerArgs: ["eval", '"Dashy.Release.db_migrate"'],
82 | env,
83 | provider: kubernetesProvider,
84 | dockerImageName: dockerImage.imageName,
85 | dependsOn: [createJob],
86 | });
87 |
88 | /**
89 | * Create a Kubernetes application using the previous docker image. Change the `port` and
90 | * `replicas` to match your needs.
91 | *
92 |  * This function creates `Deployment`, `Service`, and `Ingress` objects. The application
93 |  * will be accessible at dashy.codegram.io.
94 | */
95 | k8s.createApplication({
96 | name: "dashy",
97 | deploymentOptions: {
98 | host: host,
99 | port: 5000,
100 | replicas: 1,
101 | env,
102 | },
103 | dockerImageName: dockerImage.imageName,
104 | provider: kubernetesProvider,
105 | dependsOn: [migrateJob],
106 | });
107 |
--------------------------------------------------------------------------------
/lib/dashy_web/live/repo/repo_live.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.RepoLive do
2 | use Surface.LiveView
3 |
4 | alias DashyWeb.Components.Layout
5 | alias DashyWeb.Components.Card
6 | alias DashyWeb.Components.CardContent
7 | alias DashyWeb.Components.CardTitle
8 | alias DashyWeb.Components.Charts.LastRuns
9 | alias DashyWeb.Components.Charts.Parts
10 |
11 | alias Dashy.Charts.Helpers
12 |
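   | # Chart data loading is deferred so the page renders immediately; the
   | # data is pushed to the client-side chart hooks via events once ready.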
13 | @impl true
14 | def mount(%{"name" => name, "user" => user}, _session, socket) do
15 | Process.send_after(self(), :load_runs, 1000)
16 | Process.send_after(self(), :load_parts, 1000)
17 | parts = []
18 | colors = []
19 | repo = Dashy.Repositories.get_repository_by_user_and_name(user, name)
20 |
21 | {:ok, socket |> assign(repo: repo, parts: parts, colors: colors, uses_fake_data: false)}
22 | end
23 |
24 | @impl true
25 | def render(assigns) do
26 | ~H"""
   | <!-- Template markup lost in extraction. The page rendered the
   |      repository's LastRuns and Parts charts inside the Layout and
   |      Card components aliased above. -->
55 | """
56 | end
57 |
58 | @impl true
59 | def handle_event("update-runs", _, socket) do
60 | runs = get_runs_module(socket).runs(socket.assigns.repo)
61 | {:noreply, socket |> push_event("load-runs", %{data: runs})}
62 | end
63 |
64 | def handle_event("update-parts", _, socket) do
65 | %{parts: parts, data: data, colors: colors} = get_parts_module(socket).parts(socket.assigns.repo)
66 |
67 | {:noreply,
68 | socket |> assign(parts: parts, colors: colors) |> push_event("load-parts", %{data: data})}
69 | end
70 |
71 | @impl true
72 | def handle_info(:load_runs, socket) do
73 | runs = get_runs_module(socket).runs(socket.assigns.repo)
74 | {:noreply, socket |> push_event("load-runs", %{data: runs})}
75 | end
76 |
77 | @impl true
78 | def handle_info(:load_parts, socket) do
79 | %{parts: parts, data: data, colors: colors} =
80 | get_parts_module(socket).parts(socket.assigns.repo)
81 |
82 | {:noreply,
83 | socket |> assign(parts: parts, colors: colors) |> push_event("load-parts", %{data: data})}
84 | end
85 |
86 | defp get_runs_module(%{assigns: %{uses_fake_data: true}} = _socket),
87 | do: Dashy.Charts.WorkflowRunsFake
88 |
89 | defp get_runs_module(%{assigns: %{uses_fake_data: false}} = _socket),
90 | do: Dashy.Charts.WorkflowRuns
91 |
92 | defp get_parts_module(%{assigns: %{uses_fake_data: true}} = _socket),
93 | do: Dashy.Charts.WorkflowPartsFake
94 |
95 | defp get_parts_module(%{assigns: %{uses_fake_data: false}} = _socket),
96 | do: Dashy.Charts.WorkflowParts
97 | end
98 |
--------------------------------------------------------------------------------
/lib/dashy_web/live/ui_live.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.UILive do
2 | use Surface.LiveView
3 |
4 | alias DashyWeb.Components.Card
5 | alias DashyWeb.Components.CardContent
6 | alias DashyWeb.Components.CardTitle
7 |
8 | alias DashyWeb.Components.Button
9 |
10 | alias DashyWeb.Components.Charts.LastRuns
11 | alias DashyWeb.Components.Charts.Parts
12 | alias Dashy.Charts.Helpers
13 |
14 | @impl true
15 | def mount(_params, _session, socket) do
16 | Process.send_after(self(), :load_runs, 1000)
17 | Process.send_after(self(), :load_parts, 1000)
18 | {:ok, socket |> assign(uses_fake_data: true, parts: [], colors: [])}
19 | end
20 |
21 | @impl true
22 | def render(assigns) do
23 | ~H"""
   | <!-- Template markup lost in extraction. The page rendered a
   |      "Components" gallery: a "Buttons" section (Primary, Warning and
   |      Alert buttons, plus a "toggle source" variant wired to the
   |      "toggle-source" event), the LastRuns and Parts chart components,
   |      and a "Card" example containing "Card content". -->
64 | """
65 | end
66 |
67 | @impl true
68 | def handle_event("button-event", %{"val" => val}, socket) do
69 | require Logger
70 | Logger.debug(val)
71 | {:noreply, socket}
72 | end
73 |
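   | # Flips the data source and re-pushes both charts by re-dispatching
   | # through the update handlers below.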
74 | def handle_event("toggle-source", _, socket) do
75 | uses_fake_data = socket.assigns.uses_fake_data
76 |
77 | with socket <- assign(socket, uses_fake_data: !uses_fake_data),
78 | {:noreply, socket} <- handle_event("update-runs", %{}, socket),
79 | {:noreply, socket} <- handle_event("update-parts", %{}, socket) do
80 | {:noreply, socket}
81 | else
82 | _ -> {:noreply, socket}
83 | end
84 | end
85 |
86 | def handle_event("update-runs", _, socket) do
87 | runs = get_runs_module(socket).runs()
88 | {:noreply, socket |> push_event("load-runs", %{data: runs})}
89 | end
90 |
91 | def handle_event("update-parts", _, socket) do
92 | %{parts: parts, data: data, colors: colors} = get_parts_module(socket).parts()
93 |
94 | {:noreply,
95 | socket |> assign(parts: parts, colors: colors) |> push_event("load-parts", %{data: data})}
96 | end
97 |
98 | @impl true
99 | def handle_info(:load_runs, socket) do
100 | runs = get_runs_module(socket).runs()
101 | {:noreply, socket |> push_event("load-runs", %{data: runs})}
102 | end
103 |
104 | @impl true
105 | def handle_info(:load_parts, socket) do
106 | %{parts: parts, data: data, colors: colors} = get_parts_module(socket).parts()
107 |
108 | {:noreply,
109 | socket |> assign(parts: parts, colors: colors) |> push_event("load-parts", %{data: data})}
110 | end
111 |
112 | defp get_runs_module(%{assigns: %{uses_fake_data: true}} = _socket),
113 | do: Dashy.Charts.WorkflowRunsFake
114 |
115 | defp get_runs_module(%{assigns: %{uses_fake_data: false}} = _socket),
116 | do: Dashy.Charts.WorkflowRuns
117 |
118 | defp get_parts_module(%{assigns: %{uses_fake_data: true}} = _socket),
119 | do: Dashy.Charts.WorkflowPartsFake
120 |
121 | defp get_parts_module(%{assigns: %{uses_fake_data: false}} = _socket),
122 | do: Dashy.Charts.WorkflowParts
123 | end
124 |
--------------------------------------------------------------------------------
/lib/dashy_web/components/layout/layout_component.ex:
--------------------------------------------------------------------------------
1 | defmodule DashyWeb.Components.Layout do
2 | use Surface.LiveComponent
3 |
4 | alias Dashy.Fetchers.GenServerFetcher
5 |
6 | alias DashyWeb.Router.Helpers, as: Routes
7 |
8 | alias Dashy.Repositories
9 | alias DashyWeb.Components.Button
10 | alias DashyWeb.Components.Modal
11 |
12 | alias Surface.Components.Form
13 |
14 | @doc "The content of the Layout"
15 | slot default, required: true
16 |
17 | prop repos, :list, required: false, default: []
18 | prop show, :boolean, default: false
19 | prop modal, :boolean, default: false
20 |
21 | def render(assigns) do
22 | ~H"""
   | <!-- Template markup lost in extraction. The layout rendered its
   |      default slot, a "New Repository" button, a repository search
   |      form, and the new-repository modal, wired to the "create-repo",
   |      "open-modal", "close-modal" and "change" events handled below. -->
71 | """
72 | end
73 |
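   | # On creation, spawn a dedicated fetcher process under the dynamic
   | # supervisor and kick off the initial fetch before redirecting to the
   | # new repository's page.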
74 | def handle_event("create-repo", %{"new_repo" => params}, socket) do
75 | case Repositories.create_repository(params) do
76 | {:ok, repository} ->
77 | {:ok, pid} =
78 | DynamicSupervisor.start_child(
79 | Dashy.FetcherSupervisor,
80 | {Dashy.Fetchers.GenServerFetcher, name: String.to_atom("repo_#{repository.id}")}
81 | )
82 |
83 | GenServerFetcher.fetch(pid, repository)
84 |
85 | {:noreply,
86 | redirect(socket, to: Routes.repo_path(socket, :index, repository.user, repository.name))}
87 |
88 | {:error, changeset} ->
89 | socket = assign(socket, errors: changeset.errors)
90 | {:noreply, socket}
91 | end
92 | end
93 |
94 | def handle_event("open-modal", _, socket) do
95 | {:noreply, socket |> assign(modal: true)}
96 | end
97 |
98 | def handle_event("close-modal", _, socket) do
99 | {:noreply, socket |> assign(modal: false)}
100 | end
101 |
102 | def handle_event("change", %{"repo" => %{"name" => name, "show" => show}}, socket) do
103 | repos =
104 | case name |> String.length() do
105 | 0 -> []
106 | _ -> Repositories.get_repositories_by_name(name)
107 | end
108 |
109 | socket = assign(socket, repos: repos, show: show_value(show))
110 | {:noreply, socket}
111 | end
112 |
113 | def handle_event("change", _params, socket) do
114 | {:noreply, socket}
115 | end
116 |
117 | defp show_value("false"), do: false
118 | defp show_value(false), do: false
119 | defp show_value("true"), do: true
120 | defp show_value(true), do: true
121 | end
122 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches: [main]
6 | pull_request:
7 | branches: [main]
8 |
9 | env:
10 | MIX_ENV: test
11 |
12 | jobs:
13 | elixir-tests:
14 | name: Elixir Tests
15 | runs-on: ubuntu-20.04
16 | env:
17 | DATABASE_HOST: localhost
18 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
19 | services:
20 | postgres:
21 | image: postgres:13.1
22 | env:
23 | POSTGRES_USER: postgres
24 | POSTGRES_PASSWORD: postgres
25 | POSTGRES_DB: database_test
26 | PGDATA: /var/lib/postgresql/data/pgdata
27 | ports:
28 | - "5432:5432"
29 | options: >-
30 | --health-cmd pg_isready
31 | --health-interval 10s
32 | --health-timeout 5s
33 | --health-retries 5
34 | steps:
35 | - name: Install system dependencies
36 | run: |
37 | sudo apt-get install -y imagemagick
38 | - uses: actions/checkout@v2
39 | - name: Copy config templates
40 | working-directory: "config"
41 | run: |
42 | cp dev.local.exs.example dev.local.exs
43 | cp test.local.exs.example test.local.exs
44 | - name: Set up Elixir
45 | uses: erlef/setup-elixir@v1
46 | with:
47 | elixir-version: "1.11.2" # Define the elixir version [required]
48 | otp-version: "23.0" # Define the OTP version [required]
49 | - name: Restore deps cache
50 | uses: actions/cache@v2
51 | with:
52 | path: deps
53 | key: ${{ runner.os }}-deps-${{ hashFiles('**/mix.lock') }}
54 | restore-keys: ${{ runner.os }}-deps-
55 | - name: Restore build cache
56 | uses: actions/cache@v2
57 | with:
58 | path: _build
59 | key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}
60 | - name: Install dependencies
61 | run: mix deps.get
62 | - name: Run tests
63 | run: mix test
64 |
65 | elixir-formatting:
66 | name: Elixir Formatting
67 | runs-on: ubuntu-20.04
68 | steps:
69 | - uses: actions/checkout@v2
70 | - name: Copy config templates
71 | working-directory: "config"
72 | run: |
73 | cp dev.local.exs.example dev.local.exs
74 | cp test.local.exs.example test.local.exs
75 | - name: Set up Elixir
76 | uses: erlef/setup-elixir@v1
77 | with:
78 | elixir-version: "1.11.2" # Define the elixir version [required]
79 | otp-version: "23.0" # Define the OTP version [required]
80 | - name: Restore deps cache
81 | uses: actions/cache@v2
82 | with:
83 | path: deps
84 | key: ${{ runner.os }}-deps-${{ hashFiles('**/mix.lock') }}
85 | restore-keys: ${{ runner.os }}-deps-
86 | - name: Restore build cache
87 | uses: actions/cache@v2
88 | with:
89 | path: _build
90 | key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}
91 | - name: Install dependencies
92 | run: mix deps.get
93 | - name: Check Elixir formatting
94 | run: mix format --check-formatted
95 |
96 | elixir-compile-warnings:
97 | name: Elixir Compile warnings
98 | runs-on: ubuntu-20.04
99 | steps:
100 | - uses: actions/checkout@v2
101 | - name: Copy config templates
102 | working-directory: "config"
103 | run: |
104 | cp dev.local.exs.example dev.local.exs
105 | cp test.local.exs.example test.local.exs
106 | - name: Set up Elixir
107 | uses: erlef/setup-elixir@v1
108 | with:
109 | elixir-version: "1.11.2" # Define the elixir version [required]
110 | otp-version: "23.0" # Define the OTP version [required]
111 | - name: Restore deps cache
112 | uses: actions/cache@v2
113 | with:
114 | path: deps
115 | key: ${{ runner.os }}-deps-${{ hashFiles('**/mix.lock') }}
116 | restore-keys: ${{ runner.os }}-deps-
117 | - name: Restore build cache
118 | uses: actions/cache@v2
119 | with:
120 | path: _build
121 | key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}
122 | - name: Install dependencies
123 | run: mix deps.get
124 | - name: Compile with warnings as errors
125 | run: mix compile --warnings-as-errors --all-warnings
126 |
127 | elixir-credo:
128 | name: Credo
129 | runs-on: ubuntu-20.04
130 | steps:
131 | - uses: actions/checkout@v2
132 | - name: Copy config templates
133 | working-directory: "config"
134 | run: |
135 | cp dev.local.exs.example dev.local.exs
136 | cp test.local.exs.example test.local.exs
137 | - name: Set up Elixir
138 | uses: erlef/setup-elixir@v1
139 | with:
140 | elixir-version: "1.11.2" # Define the elixir version [required]
141 | otp-version: "23.0" # Define the OTP version [required]
142 | - name: Restore deps cache
143 | uses: actions/cache@v2
144 | with:
145 | path: deps
146 | key: ${{ runner.os }}-deps-${{ hashFiles('**/mix.lock') }}
147 | restore-keys: ${{ runner.os }}-deps-
148 | - name: Restore build cache
149 | uses: actions/cache@v2
150 | with:
151 | path: _build
152 | key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}
153 | - name: Install dependencies
154 | run: mix deps.get
155 | - name: Check Credo suggestions
156 | run: mix credo
157 |
158 | javascript-lint:
159 | name: JavaScript Lint
160 | runs-on: ubuntu-20.04
161 | steps:
162 | - uses: actions/checkout@v2
163 | - name: Copy config templates
164 | working-directory: "config"
165 | run: |
166 | cp dev.local.exs.example dev.local.exs
167 | cp test.local.exs.example test.local.exs
168 | - name: Set up Elixir
169 | uses: erlef/setup-elixir@v1
170 | with:
171 | elixir-version: "1.11.2" # Define the elixir version [required]
172 | otp-version: "23.0" # Define the OTP version [required]
173 | - name: Restore deps cache
174 | uses: actions/cache@v2
175 | with:
176 | path: deps
177 | key: ${{ runner.os }}-deps-${{ hashFiles('**/mix.lock') }}
178 | restore-keys: ${{ runner.os }}-deps-
179 | - name: Restore build cache
180 | uses: actions/cache@v2
181 | with:
182 | path: _build
183 | key: ${{ runner.os }}-build-${{ hashFiles('**/mix.lock') }}
184 | - name: Install dependencies
185 | run: mix deps.get
186 | - name: Set up node
187 | uses: actions/setup-node@v2.1.5
188 | with:
189 | node-version: "15"
190 | - name: Restore npm cache
191 | uses: actions/cache@v2
192 | with:
193 | path: assets/node_modules
194 | key: ${{ runner.os }}-npm-${{ hashFiles('**/package-lock.json') }}
195 | restore-keys: ${{ runner.os }}-npm-
196 | - name: Install NodeJS deps
197 | run: npm ci
198 | working-directory: ./assets
199 | - run: npm run lint
200 | working-directory: ./assets
201 |
--------------------------------------------------------------------------------
/.credo.exs:
--------------------------------------------------------------------------------
1 | # This file contains the configuration for Credo and you are probably reading
2 | # this after creating it with `mix credo.gen.config`.
3 | #
4 | # If you find anything wrong or unclear in this file, please report an
5 | # issue on GitHub: https://github.com/rrrene/credo/issues
6 | #
7 | %{
8 | #
9 | # You can have as many configs as you like in the `configs:` field.
10 | configs: [
11 | %{
12 | #
13 | # Run any config using `mix credo -C <name>`. If no config name is given
14 | # "default" is used.
15 | #
16 | name: "default",
17 | #
18 | # These are the files included in the analysis:
19 | files: %{
20 | #
21 | # You can give explicit globs or simply directories.
22 | # In the latter case `**/*.{ex,exs}` will be used.
23 | #
24 | included: [
25 | "lib/",
26 | "src/",
27 | "test/",
28 | "web/",
29 | "apps/*/lib/",
30 | "apps/*/src/",
31 | "apps/*/test/",
32 | "apps/*/web/"
33 | ],
34 | excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"]
35 | },
36 | #
37 | # Load and configure plugins here:
38 | #
39 | plugins: [],
40 | #
41 | # If you create your own checks, you must specify the source files for
42 | # them here, so they can be loaded by Credo before running the analysis.
43 | #
44 | requires: [],
45 | #
46 | # If you want to enforce a style guide and need a more traditional linting
47 | # experience, you can change `strict` to `true` below:
48 | #
49 | strict: false,
50 | #
51 | # To modify the timeout for parsing files, change this value:
52 | #
53 | parse_timeout: 5000,
54 | #
55 | # If you want to use uncolored output by default, you can change `color`
56 | # to `false` below:
57 | #
58 | color: true,
59 | #
60 | # You can customize the parameters of any check by adding a second element
61 | # to the tuple.
62 | #
63 | # To disable a check put `false` as second element:
64 | #
65 | # {Credo.Check.Design.DuplicatedCode, false}
66 | #
67 | checks: [
68 | #
69 | ## Consistency Checks
70 | #
71 | {Credo.Check.Consistency.ExceptionNames, []},
72 | {Credo.Check.Consistency.LineEndings, []},
73 | {Credo.Check.Consistency.ParameterPatternMatching, []},
74 | {Credo.Check.Consistency.SpaceAroundOperators, []},
75 | {Credo.Check.Consistency.SpaceInParentheses, []},
76 | {Credo.Check.Consistency.TabsOrSpaces, []},
77 |
78 | #
79 | ## Design Checks
80 | #
81 | # You can customize the priority of any check
82 | # Priority values are: `low, normal, high, higher`
83 | #
84 | {Credo.Check.Design.AliasUsage,
85 | [priority: :low, if_nested_deeper_than: 2, if_called_more_often_than: 0]},
86 | # You can also customize the exit_status of each check.
87 | # If you don't want TODO comments to cause `mix credo` to fail, just
88 | # set this value to 0 (zero).
89 | #
90 | {Credo.Check.Design.TagTODO, [exit_status: 2]},
91 | {Credo.Check.Design.TagFIXME, []},
92 |
93 | #
94 | ## Readability Checks
95 | #
96 | {Credo.Check.Readability.AliasOrder, []},
97 | {Credo.Check.Readability.FunctionNames, []},
98 | {Credo.Check.Readability.LargeNumbers, []},
99 | {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
100 | {Credo.Check.Readability.ModuleAttributeNames, []},
101 | {Credo.Check.Readability.ModuleDoc, [priority: :low]},
102 | {Credo.Check.Readability.ModuleNames, []},
103 | {Credo.Check.Readability.ParenthesesInCondition, []},
104 | {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
105 | {Credo.Check.Readability.PredicateFunctionNames, []},
106 | {Credo.Check.Readability.PreferImplicitTry, []},
107 | {Credo.Check.Readability.RedundantBlankLines, []},
108 | {Credo.Check.Readability.Semicolons, []},
109 | {Credo.Check.Readability.SpaceAfterCommas, []},
110 | {Credo.Check.Readability.StringSigils, []},
111 | {Credo.Check.Readability.TrailingBlankLine, []},
112 | {Credo.Check.Readability.TrailingWhiteSpace, []},
113 | {Credo.Check.Readability.UnnecessaryAliasExpansion, []},
114 | {Credo.Check.Readability.VariableNames, []},
115 |
116 | #
117 | ## Refactoring Opportunities
118 | #
119 | {Credo.Check.Refactor.CondStatements, []},
120 | {Credo.Check.Refactor.CyclomaticComplexity, []},
121 | {Credo.Check.Refactor.FunctionArity, []},
122 | {Credo.Check.Refactor.LongQuoteBlocks, []},
123 | # {Credo.Check.Refactor.MapInto, []},
124 | {Credo.Check.Refactor.MatchInCondition, []},
125 | {Credo.Check.Refactor.NegatedConditionsInUnless, []},
126 | {Credo.Check.Refactor.NegatedConditionsWithElse, []},
127 | {Credo.Check.Refactor.Nesting, []},
128 | {Credo.Check.Refactor.UnlessWithElse, []},
129 | {Credo.Check.Refactor.WithClauses, []},
130 |
131 | #
132 | ## Warnings
133 | #
134 | {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []},
135 | {Credo.Check.Warning.BoolOperationOnSameValues, []},
136 | {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []},
137 | {Credo.Check.Warning.IExPry, []},
138 | {Credo.Check.Warning.IoInspect, []},
139 | # {Credo.Check.Warning.LazyLogging, []},
140 | {Credo.Check.Warning.MixEnv, false},
141 | {Credo.Check.Warning.OperationOnSameValues, []},
142 | {Credo.Check.Warning.OperationWithConstantResult, []},
143 | {Credo.Check.Warning.RaiseInsideRescue, []},
144 | {Credo.Check.Warning.UnusedEnumOperation, []},
145 | {Credo.Check.Warning.UnusedFileOperation, []},
146 | {Credo.Check.Warning.UnusedKeywordOperation, []},
147 | {Credo.Check.Warning.UnusedListOperation, []},
148 | {Credo.Check.Warning.UnusedPathOperation, []},
149 | {Credo.Check.Warning.UnusedRegexOperation, []},
150 | {Credo.Check.Warning.UnusedStringOperation, []},
151 | {Credo.Check.Warning.UnusedTupleOperation, []},
152 | {Credo.Check.Warning.UnsafeExec, []},
153 |
154 | #
155 | # Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`)
156 |
157 | #
158 | # Controversial and experimental checks (opt-in, just replace `false` with `[]`)
159 | #
160 | {Credo.Check.Consistency.MultiAliasImportRequireUse, false},
161 | {Credo.Check.Consistency.UnusedVariableNames, false},
162 | {Credo.Check.Design.DuplicatedCode, false},
163 | {Credo.Check.Readability.AliasAs, false},
164 | {Credo.Check.Readability.BlockPipe, false},
165 | {Credo.Check.Readability.ImplTrue, false},
166 | {Credo.Check.Readability.MultiAlias, false},
167 | {Credo.Check.Readability.SeparateAliasRequire, false},
168 | {Credo.Check.Readability.SinglePipe, false},
169 | {Credo.Check.Readability.Specs, false},
170 | {Credo.Check.Readability.StrictModuleLayout, false},
171 | {Credo.Check.Readability.WithCustomTaggedTuple, false},
172 | {Credo.Check.Refactor.ABCSize, false},
173 | {Credo.Check.Refactor.AppendSingleItem, false},
174 | {Credo.Check.Refactor.DoubleBooleanNegation, false},
175 | {Credo.Check.Refactor.ModuleDependencies, false},
176 | {Credo.Check.Refactor.NegatedIsNil, false},
177 | {Credo.Check.Refactor.PipeChainStart, false},
178 | {Credo.Check.Refactor.VariableRebinding, false},
179 | {Credo.Check.Warning.LeakyEnvironment, false},
180 | {Credo.Check.Warning.MapGetUnsafePass, false},
181 | {Credo.Check.Warning.UnsafeToAtom, false}
182 |
183 | #
184 | # Custom checks can be created using `mix credo.gen.check`.
185 | #
186 | ]
187 | }
188 | ]
189 | }
190 |
--------------------------------------------------------------------------------
/lib/dashy_web/live/graphs.html:
--------------------------------------------------------------------------------
<!-- Chart.js demo page; its static markup and inline chart scripts were
     lost in extraction. -->
--------------------------------------------------------------------------------
/mix.lock:
--------------------------------------------------------------------------------
1 | %{
2 | "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
3 | "certifi": {:hex, :certifi, "2.6.1", "dbab8e5e155a0763eea978c913ca280a6b544bfa115633fa20249c3d396d9493", [:rebar3], [], "hexpm", "524c97b4991b3849dd5c17a631223896272c6b0af446778ba4675a1dff53bb7e"},
4 | "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"},
5 | "cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"},
6 | "cowboy_telemetry": {:hex, :cowboy_telemetry, "0.3.1", "ebd1a1d7aff97f27c66654e78ece187abdc646992714164380d8a041eda16754", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "3a6efd3366130eab84ca372cbd4a7d3c3a97bdfcfb4911233b035d117063f0af"},
7 | "cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"},
8 | "credo": {:hex, :credo, "1.5.5", "e8f422026f553bc3bebb81c8e8bf1932f498ca03339856c7fec63d3faac8424b", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "dd8623ab7091956a855dc9f3062486add9c52d310dfd62748779c4315d8247de"},
9 | "db_connection": {:hex, :db_connection, "2.4.0", "d04b1b73795dae60cead94189f1b8a51cc9e1f911c234cc23074017c43c031e5", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "ad416c21ad9f61b3103d254a71b63696ecadb6a917b36f563921e0de00d7d7c8"},
10 | "decimal": {:hex, :decimal, "2.0.0", "a78296e617b0f5dd4c6caf57c714431347912ffb1d0842e998e9792b5642d697", [:mix], [], "hexpm", "34666e9c55dea81013e77d9d87370fe6cb6291d1ef32f46a1600230b1d44f577"},
11 | "dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"},
12 | "earmark": {:hex, :earmark, "1.4.14", "d04572cef64dd92726a97d92d714e38d6e130b024ea1b3f8a56e7de66ec04e50", [:mix], [{:earmark_parser, ">= 1.4.12", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "df338b8b1852ee425180b276c56c6941cb12220e04fe8718fe4acbdd35fd699f"},
13 | "earmark_parser": {:hex, :earmark_parser, "1.4.13", "0c98163e7d04a15feb62000e1a891489feb29f3d10cb57d4f845c405852bbef8", [:mix], [], "hexpm", "d602c26af3a0af43d2f2645613f65841657ad6efc9f0e361c3b6c06b578214ba"},
14 | "ecto": {:hex, :ecto, "3.6.1", "7bb317e3fd0179ad725069fd0fe8a28ebe48fec6282e964ea502e4deccb0bd0f", [:mix], [{:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "cbb3294a990447b19f0725488a749f8cf806374e0d9d0dffc45d61e7aeaf6553"},
15 | "ecto_sql": {:hex, :ecto_sql, "3.6.1", "8774dc3fc0ff7b6be510858b99883640f990c0736b8ab54588f9a0c91807f909", [:mix], [{:db_connection, "~> 2.2", [hex: :db_connection, repo: "hexpm", optional: false]}, {:ecto, "~> 3.6.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:myxql, "~> 0.4.0 or ~> 0.5.0", [hex: :myxql, repo: "hexpm", optional: true]}, {:postgrex, "~> 0.15.0 or ~> 1.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:tds, "~> 2.1.1", [hex: :tds, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "66f35c3f2d5978b6bffebd1e6351ab8c9d6b68650d62abd1ab8d149de40e0779"},
16 | "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
17 | "ex_doc": {:hex, :ex_doc, "0.24.2", "e4c26603830c1a2286dae45f4412a4d1980e1e89dc779fcd0181ed1d5a05c8d9", [:mix], [{:earmark_parser, "~> 1.4.0", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "e134e1d9e821b8d9e4244687fb2ace58d479b67b282de5158333b0d57c6fb7da"},
18 | "ex_machina": {:hex, :ex_machina, "2.7.0", "b792cc3127fd0680fecdb6299235b4727a4944a09ff0fa904cc639272cd92dc7", [:mix], [{:ecto, "~> 2.2 or ~> 3.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:ecto_sql, "~> 3.0", [hex: :ecto_sql, repo: "hexpm", optional: true]}], "hexpm", "419aa7a39bde11894c87a615c4ecaa52d8f107bbdd81d810465186f783245bf8"},
19 | "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
20 | "floki": {:hex, :floki, "0.30.1", "75d35526d3a1459920b6e87fdbc2e0b8a3670f965dd0903708d2b267e0904c55", [:mix], [{:html_entities, "~> 0.5.0", [hex: :html_entities, repo: "hexpm", optional: false]}], "hexpm", "e9c03524447d1c4cbfccd672d739b8c18453eee377846b119d4fd71b1a176bb8"},
21 | "gettext": {:hex, :gettext, "0.18.2", "7df3ea191bb56c0309c00a783334b288d08a879f53a7014341284635850a6e55", [:mix], [], "hexpm", "f9f537b13d4fdd30f3039d33cb80144c3aa1f8d9698e47d7bcbcc8df93b1f5c5"},
22 | "hackney": {:hex, :hackney, "1.17.4", "99da4674592504d3fb0cfef0db84c3ba02b4508bae2dff8c0108baa0d6e0977c", [:rebar3], [{:certifi, "~>2.6.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "~>6.1.0", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "~>1.0.0", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "~>1.1", [hex: :mimerl, repo: "hexpm", optional: false]}, {:parse_trans, "3.3.1", [hex: :parse_trans, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "~>1.1.0", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}, {:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "de16ff4996556c8548d512f4dbe22dd58a587bf3332e7fd362430a7ef3986b16"},
23 | "html_entities": {:hex, :html_entities, "0.5.2", "9e47e70598da7de2a9ff6af8758399251db6dbb7eebe2b013f2bbd2515895c3c", [:mix], [], "hexpm", "c53ba390403485615623b9531e97696f076ed415e8d8058b1dbaa28181f4fdcc"},
24 | "httpoison": {:hex, :httpoison, "1.8.0", "6b85dea15820b7804ef607ff78406ab449dd78bed923a49c7160e1886e987a3d", [:mix], [{:hackney, "~> 1.17", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm", "28089eaa98cf90c66265b6b5ad87c59a3729bea2e74e9d08f9b51eb9729b3c3a"},
25 | "idna": {:hex, :idna, "6.1.1", "8a63070e9f7d0c62eb9d9fcb360a7de382448200fbbd1b106cc96d3d8099df8d", [:rebar3], [{:unicode_util_compat, "~>0.7.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm", "92376eb7894412ed19ac475e4a86f7b413c1b9fbb5bd16dccd57934157944cea"},
26 | "jason": {:hex, :jason, "1.2.2", "ba43e3f2709fd1aa1dce90aaabfd039d000469c05c56f0b8e31978e03fa39052", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "18a228f5f0058ee183f29f9eae0805c6e59d61c3b006760668d8d18ff0d12179"},
27 | "makeup": {:hex, :makeup, "1.0.5", "d5a830bc42c9800ce07dd97fa94669dfb93d3bf5fcf6ea7a0c67b2e0e4a7f26c", [:mix], [{:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "cfa158c02d3f5c0c665d0af11512fed3fba0144cf1aadee0f2ce17747fba2ca9"},
28 | "makeup_elixir": {:hex, :makeup_elixir, "0.15.1", "b5888c880d17d1cc3e598f05cdb5b5a91b7b17ac4eaf5f297cb697663a1094dd", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.1", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "db68c173234b07ab2a07f645a5acdc117b9f99d69ebf521821d89690ae6c6ec8"},
29 | "makeup_erlang": {:hex, :makeup_erlang, "0.1.1", "3fcb7f09eb9d98dc4d208f49cc955a34218fc41ff6b84df7c75b3e6e533cc65f", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "174d0809e98a4ef0b3309256cbf97101c6ec01c4ab0b23e926a9e17df2077cbb"},
30 | "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm", "69b09adddc4f74a40716ae54d140f93beb0fb8978d8636eaded0c31b6f099f16"},
31 | "mime": {:hex, :mime, "1.6.0", "dabde576a497cef4bbdd60aceee8160e02a6c89250d6c0b29e56c0dfb00db3d2", [:mix], [], "hexpm", "31a1a8613f8321143dde1dafc36006a17d28d02bdfecb9e95a880fa7aabd19a7"},
32 | "mimerl": {:hex, :mimerl, "1.2.0", "67e2d3f571088d5cfd3e550c383094b47159f3eee8ffa08e64106cdf5e981be3", [:rebar3], [], "hexpm", "f278585650aa581986264638ebf698f8bb19df297f66ad91b18910dfc6e19323"},
33 | "mix_test_watch": {:hex, :mix_test_watch, "1.0.2", "34900184cbbbc6b6ed616ed3a8ea9b791f9fd2088419352a6d3200525637f785", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "47ac558d8b06f684773972c6d04fcc15590abdb97aeb7666da19fcbfdc441a07"},
34 | "nimble_parsec": {:hex, :nimble_parsec, "1.1.0", "3a6fca1550363552e54c216debb6a9e95bd8d32348938e13de5eda962c0d7f89", [:mix], [], "hexpm", "08eb32d66b706e913ff748f11694b17981c0b04a33ef470e33e11b3d3ac8f54b"},
35 | "parse_trans": {:hex, :parse_trans, "3.3.1", "16328ab840cc09919bd10dab29e431da3af9e9e7e7e6f0089dd5a2d2820011d8", [:rebar3], [], "hexpm", "07cd9577885f56362d414e8c4c4e6bdf10d43a8767abb92d24cbe8b24c54888b"},
36 | "phoenix": {:hex, :phoenix, "1.5.9", "a6368d36cfd59d917b37c44386e01315bc89f7609a10a45a22f47c007edf2597", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.13 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 1.0 or ~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.1.2 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7e4bce20a67c012f1fbb0af90e5da49fa7bf0d34e3a067795703b74aef75427d"},
37 | "phoenix_ecto": {:hex, :phoenix_ecto, "4.2.1", "13f124cf0a3ce0f1948cf24654c7b9f2347169ff75c1123f44674afee6af3b03", [:mix], [{:ecto, "~> 3.0", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14.2 or ~> 2.15", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "478a1bae899cac0a6e02be1deec7e2944b7754c04e7d4107fc5a517f877743c0"},
38 | "phoenix_html": {:hex, :phoenix_html, "2.14.3", "51f720d0d543e4e157ff06b65de38e13303d5778a7919bcc696599e5934271b8", [:mix], [{:plug, "~> 1.5", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "efd697a7fff35a13eeeb6b43db884705cba353a1a41d127d118fda5f90c8e80f"},
39 | "phoenix_live_dashboard": {:hex, :phoenix_live_dashboard, "0.4.0", "87990e68b60213d7487e65814046f9a2bed4a67886c943270125913499b3e5c3", [:mix], [{:ecto_psql_extras, "~> 0.4.1 or ~> 0.5", [hex: :ecto_psql_extras, repo: "hexpm", optional: true]}, {:phoenix_html, "~> 2.14.1 or ~> 2.15", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.15.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.4.0 or ~> 0.5.0 or ~> 0.6.0", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "8d52149e58188e9e4497cc0d8900ab94d9b66f96998ec38c47c7a4f8f4f50e57"},
40 | "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.3.1", "9eba6ad16bd80c45f338b2059c7b255ce30784d76f4181304e7b78640e5a7513", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.4", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm", "f3ae26b5abb85a1cb2bc8bb199e29fbcefb34259e469b31fe0c6323f2175a5ef"},
41 | "phoenix_live_view": {:hex, :phoenix_live_view, "0.15.5", "153f15022ff03162201cfbd3de73115f3a6e868bc8a3c07b86a8e984de6a57e2", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix, "~> 1.5.7", [hex: :phoenix, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.14", [hex: :phoenix_html, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.2 or ~> 0.5", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "00c80cf27365bdeb44c694b1dc8cf950b4b26141307df340d39a9be47d8dc1ef"},
42 | "phoenix_pubsub": {:hex, :phoenix_pubsub, "2.0.0", "a1ae76717bb168cdeb10ec9d92d1480fec99e3080f011402c0a2d68d47395ffb", [:mix], [], "hexpm", "c52d948c4f261577b9c6fa804be91884b381a7f8f18450c5045975435350f771"},
43 | "plug": {:hex, :plug, "1.11.1", "f2992bac66fdae679453c9e86134a4201f6f43a687d8ff1cd1b2862d53c80259", [:mix], [{:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "23524e4fefbb587c11f0833b3910bfb414bf2e2534d61928e920f54e3a1b881f"},
44 | "plug_cowboy": {:hex, :plug_cowboy, "2.5.0", "51c998f788c4e68fc9f947a5eba8c215fbb1d63a520f7604134cab0270ea6513", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5b2c8925a5e2587446f33810a58c01e66b3c345652eeec809b76ba007acde71a"},
45 | "plug_crypto": {:hex, :plug_crypto, "1.2.2", "05654514ac717ff3a1843204b424477d9e60c143406aa94daf2274fdd280794d", [:mix], [], "hexpm", "87631c7ad914a5a445f0a3809f99b079113ae4ed4b867348dd9eec288cecb6db"},
46 | "postgrex": {:hex, :postgrex, "0.15.9", "46f8fe6f25711aeb861c4d0ae09780facfdf3adbd2fb5594ead61504dd489bda", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}], "hexpm", "610719103e4cb2223d4ab78f9f0f3e720320eeca6011415ab4137ddef730adee"},
47 | "ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
48 | "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.6", "cf344f5692c82d2cd7554f5ec8fd961548d4fd09e7d22f5b62482e5aeaebd4b0", [:make, :mix, :rebar3], [], "hexpm", "bdb0d2471f453c88ff3908e7686f86f9be327d065cc1ec16fa4540197ea04680"},
49 | "surface": {:hex, :surface, "0.4.0", "1e93db686afd9abe79647278ebe15d6f595ba5b80253a0d4b3f00d530442fbdc", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 0.5 or ~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:phoenix_live_view, "~> 0.15.0", [hex: :phoenix_live_view, repo: "hexpm", optional: false]}], "hexpm", "610517f148cb3231a4b55d84f68888737fbda68245a1fc1f193befca15a19fc1"},
50 | "telemetry": {:hex, :telemetry, "0.4.3", "a06428a514bdbc63293cd9a6263aad00ddeb66f608163bdec7c8995784080818", [:rebar3], [], "hexpm", "eb72b8365ffda5bed68a620d1da88525e326cb82a75ee61354fc24b844768041"},
51 | "telemetry_metrics": {:hex, :telemetry_metrics, "0.6.0", "da9d49ee7e6bb1c259d36ce6539cd45ae14d81247a2b0c90edf55e2b50507f7b", [:mix], [{:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "5cfe67ad464b243835512aa44321cee91faed6ea868d7fb761d7016e02915c3d"},
52 | "telemetry_poller": {:hex, :telemetry_poller, "0.5.1", "21071cc2e536810bac5628b935521ff3e28f0303e770951158c73eaaa01e962a", [:rebar3], [{:telemetry, "~> 0.4", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "4cab72069210bc6e7a080cec9afffad1b33370149ed5d379b81c7c5f0c663fd4"},
53 | "unicode_util_compat": {:hex, :unicode_util_compat, "0.7.0", "bc84380c9ab48177092f43ac89e4dfa2c6d62b40b8bd132b1059ecc7232f9a78", [:rebar3], [], "hexpm", "25eee6d67df61960cf6a794239566599b09e17e668d3700247bc498638152521"},
54 | }
55 |
--------------------------------------------------------------------------------