├── .tool-versions ├── mix.lock.license ├── .tool-versions.license ├── src ├── ash_ops_query.erl.license ├── ash_ops_query.peg.license ├── ash_ops_query.peg └── ash_ops_query.erl ├── documentation └── dsls │ ├── DSL-AshOps.md.license │ └── DSL-AshOps.md ├── config ├── dev.exs ├── prod.exs ├── runtime.exs ├── test.exs └── config.exs ├── test ├── test_helper.exs ├── ash_ops_test.exs ├── support │ ├── example │ │ ├── actor.ex │ │ └── post.ex │ └── example.ex └── ash_ops │ ├── task │ ├── action_test.exs │ ├── destroy_test.exs │ ├── update_test.exs │ ├── create_test.exs │ ├── list_test.exs │ └── get_test.exs │ └── query_lang_test.exs ├── .check.exs ├── .github ├── dependabot.yml └── workflows │ └── elixir.yml ├── .doctor.exs ├── .formatter.exs ├── .gitignore ├── LICENSES └── MIT.txt ├── lib ├── ash_ops │ ├── info.ex │ ├── verifier │ │ └── verify_task.ex │ ├── entity │ │ ├── create.ex │ │ ├── list.ex │ │ ├── action.ex │ │ ├── get.ex │ │ ├── update.ex │ │ └── destroy.ex │ ├── task │ │ ├── get.ex │ │ ├── destroy.ex │ │ ├── action.ex │ │ ├── list.ex │ │ ├── common.ex │ │ ├── types.ex │ │ ├── create.ex │ │ ├── update.ex │ │ └── arg_schema.ex │ ├── transformer │ │ └── prepare_task.ex │ └── query_lang.ex ├── mix │ └── tasks │ │ └── ash_ops.install.ex └── ash_ops.ex ├── README.md ├── CHANGELOG.md ├── mix.exs └── mix.lock /.tool-versions: -------------------------------------------------------------------------------- 1 | erlang 27.2.4 2 | elixir 1.18.2 3 | pipx 1.8.0 4 | -------------------------------------------------------------------------------- /mix.lock.license: -------------------------------------------------------------------------------- 1 | SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | 3 | SPDX-License-Identifier: MIT 4 | -------------------------------------------------------------------------------- /.tool-versions.license: -------------------------------------------------------------------------------- 1 | SPDX-FileCopyrightText: 2025 ash_ops contributors 
2 | 3 | SPDX-License-Identifier: MIT 4 | -------------------------------------------------------------------------------- /src/ash_ops_query.erl.license: -------------------------------------------------------------------------------- 1 | SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | 3 | SPDX-License-Identifier: MIT 4 | -------------------------------------------------------------------------------- /src/ash_ops_query.peg.license: -------------------------------------------------------------------------------- 1 | SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | 3 | SPDX-License-Identifier: MIT 4 | -------------------------------------------------------------------------------- /documentation/dsls/DSL-AshOps.md.license: -------------------------------------------------------------------------------- 1 | SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | 3 | SPDX-License-Identifier: MIT 4 | -------------------------------------------------------------------------------- /config/dev.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | import Config 6 | -------------------------------------------------------------------------------- /config/prod.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | import Config 6 | -------------------------------------------------------------------------------- /config/runtime.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | import Config 6 | -------------------------------------------------------------------------------- /test/test_helper.exs: 
-------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | ExUnit.start() 6 | -------------------------------------------------------------------------------- /config/test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | import Config 6 | 7 | config :logger, level: :warning 8 | config :ash, disable_async?: true 9 | -------------------------------------------------------------------------------- /test/ash_ops_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOpsTest do 6 | @moduledoc false 7 | use ExUnit.Case 8 | doctest AshOps 9 | end 10 | -------------------------------------------------------------------------------- /.check.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | [ 6 | tools: [ 7 | {:sobelow, "mix sobelow -i Config.HTTPS --exit"}, 8 | {:spark_formatter, "mix spark.formatter --check"}, 9 | {:spark_cheat_sheets, "mix spark.cheat_sheets --check"}, 10 | {:reuse, command: ["pipx", "run", "reuse", "lint", "-q"]} 11 | ] 12 | ] 13 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | --- 6 | updates: 7 | - directory: / 8 | groups: 9 | dev-dependencies: 10 | dependency-type: development 11 | production-dependencies: 12 | dependency-type: production 13 | 
package-ecosystem: mix 14 | schedule: 15 | interval: monthly 16 | versioning-strategy: lockfile-only 17 | version: 2 18 | -------------------------------------------------------------------------------- /.github/workflows/elixir.yml: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | name: Ash CI 6 | 7 | on: 8 | push: 9 | tags: 10 | - "v*" 11 | branches: [main] 12 | pull_request: 13 | branches: [main] 14 | jobs: 15 | ash-ci: 16 | uses: ash-project/ash/.github/workflows/ash-ci.yml@main 17 | secrets: 18 | HEX_API_KEY: ${{ secrets.HEX_API_KEY }} 19 | with: 20 | spark-formatter: true 21 | postgres: false 22 | igniter-upgrade: false 23 | reuse: true 24 | -------------------------------------------------------------------------------- /test/support/example/actor.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule Example.Actor do 6 | @moduledoc false 7 | use Ash.Resource, 8 | data_layer: Ash.DataLayer.Ets, 9 | domain: Example 10 | 11 | actions do 12 | defaults [:read, :destroy, create: :*, update: :*] 13 | end 14 | 15 | attributes do 16 | uuid_v7_primary_key :id 17 | attribute :is_good, :boolean, allow_nil?: false, public?: true 18 | end 19 | 20 | ets do 21 | table :actor 22 | private? 
true 23 | end 24 | end 25 | -------------------------------------------------------------------------------- /.doctor.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | %Doctor.Config{ 6 | ignore_modules: [ 7 | ~r/^Inspect\./, 8 | ~r/^Example/, 9 | AshOps.Info 10 | ], 11 | ignore_paths: [], 12 | min_module_doc_coverage: 40, 13 | min_module_spec_coverage: 0, 14 | min_overall_doc_coverage: 50, 15 | min_overall_spec_coverage: 0, 16 | min_overall_moduledoc_coverage: 100, 17 | exception_moduledoc_required: true, 18 | raise: false, 19 | reporter: Doctor.Reporters.Full, 20 | struct_type_spec_required: true, 21 | umbrella: false 22 | } 23 | -------------------------------------------------------------------------------- /.formatter.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | spark_locals_without_parens = [ 6 | action: 3, 7 | action: 4, 8 | arguments: 1, 9 | create: 3, 10 | create: 4, 11 | description: 1, 12 | destroy: 3, 13 | destroy: 4, 14 | get: 3, 15 | get: 4, 16 | list: 3, 17 | list: 4, 18 | prefix: 1, 19 | read_action: 1, 20 | update: 3, 21 | update: 4 22 | ] 23 | 24 | [ 25 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"], 26 | plugins: [Spark.Formatter], 27 | import_deps: [:ash], 28 | locals_without_parens: spark_locals_without_parens, 29 | export: [ 30 | locals_without_parens: spark_locals_without_parens 31 | ] 32 | ] 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | # The directory Mix will write compiled artifacts to. 
6 | /_build/ 7 | 8 | # If you run "mix test --cover", coverage assets end up here. 9 | /cover/ 10 | 11 | # The directory Mix downloads your dependencies sources to. 12 | /deps/ 13 | 14 | # Where third-party dependencies like ExDoc output generated docs. 15 | /doc/ 16 | 17 | # If the VM crashes, it generates a dump, let's ignore it too. 18 | erl_crash.dump 19 | 20 | # Also ignore archive artifacts (built via "mix archive.build"). 21 | *.ez 22 | 23 | # Ignore package tarball (built via "mix hex.build"). 24 | ash_ops-*.tar 25 | 26 | # Temporary files, for example, from tests. 27 | /tmp/ 28 | 29 | .elixir_ls 30 | -------------------------------------------------------------------------------- /test/ash_ops/task/action_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.ActionTest do 6 | @moduledoc false 7 | use ExUnit.Case, async: true 8 | import ExUnit.CaptureIO 9 | 10 | setup do 11 | ansi_enabled? = Application.get_env(:elixir, :ansi_enabled) 12 | Application.put_env(:elixir, :ansi_enabled, false) 13 | 14 | on_exit(fn -> 15 | Application.put_env(:elixir, :ansi_enabled, ansi_enabled?) 
16 | end) 17 | end 18 | 19 | test "a resource is encoded" do 20 | id = Ash.UUID.generate() 21 | 22 | output = 23 | capture_io(fn -> 24 | Mix.Task.rerun("ash_ops.example.publish_post", [id, "platform"]) 25 | end) 26 | 27 | assert output =~ ~r/id: #{id}/m 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /test/support/example.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule Example do 6 | @moduledoc false 7 | use Ash.Domain, otp_app: :ash_ops, extensions: [AshOps] 8 | 9 | mix_tasks do 10 | action __MODULE__.Post, :publish_post, :publish, arguments: [:id, :platform] 11 | get __MODULE__.Post, :get_post, :read 12 | list __MODULE__.Post, :list_posts, :read 13 | create __MODULE__.Post, :create_post, :create 14 | destroy __MODULE__.Post, :destroy_post, :destroy 15 | update __MODULE__.Post, :update_post, :update 16 | end 17 | 18 | resources do 19 | resource __MODULE__.Actor do 20 | define :create_actor, action: :create 21 | end 22 | 23 | resource __MODULE__.Post do 24 | define :create_post, action: :create 25 | define :update_post, action: :update 26 | define :get_post, action: :read, get_by: [:id] 27 | end 28 | end 29 | end 30 | -------------------------------------------------------------------------------- /LICENSES/MIT.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and 6 | associated documentation files (the "Software"), to deal in the Software without restriction, including 7 | without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 8 | copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the 9 | 
following conditions: 10 | 11 | The above copyright notice and this permission notice shall be included in all copies or substantial 12 | portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT 15 | LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO 16 | EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 17 | IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE 18 | USE OR OTHER DEALINGS IN THE SOFTWARE. 19 | -------------------------------------------------------------------------------- /lib/ash_ops/info.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Info do 6 | @moduledoc """ 7 | Auto-generated introspection for the `AshOps` extension. 8 | """ 9 | use Spark.InfoGenerator, extension: AshOps, sections: [:mix_tasks] 10 | 11 | @type domain_or_dsl :: module | Spark.Dsl.t() 12 | 13 | @doc """ 14 | Get a mix task by name. 
15 | """ 16 | @spec mix_task(domain_or_dsl, atom) :: {:ok, AshOps.entity()} | {:error, any} 17 | def mix_task(domain, name) do 18 | domain 19 | |> mix_tasks() 20 | |> Enum.reduce_while({:error, "No mix task named `#{inspect(name)}`"}, fn 21 | %{name: ^name} = task, _ -> {:halt, {:ok, task}} 22 | _task, error -> {:cont, error} 23 | end) 24 | end 25 | 26 | @doc "Raising version of `mix_task/2`" 27 | @spec mix_task!(domain_or_dsl, atom) :: AshOps.entity() | no_return 28 | def mix_task!(domain, name) do 29 | case mix_task(domain, name) do 30 | {:ok, task} -> task 31 | {:error, reason} -> raise reason 32 | end 33 | end 34 | end 35 | -------------------------------------------------------------------------------- /test/ash_ops/task/destroy_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.DestroyTest do 6 | @moduledoc false 7 | use ExUnit.Case, async: true 8 | import ExUnit.CaptureIO 9 | 10 | setup do 11 | ansi_enabled? = Application.get_env(:elixir, :ansi_enabled) 12 | Application.put_env(:elixir, :ansi_enabled, false) 13 | 14 | on_exit(fn -> 15 | Application.put_env(:elixir, :ansi_enabled, ansi_enabled?) 
16 | end) 17 | 18 | post = 19 | Example.create_post!(%{ 20 | title: Faker.Food.dish(), 21 | body: Faker.Food.description(), 22 | slug: Faker.Internet.slug(), 23 | tenant: Faker.Lorem.word() 24 | }) 25 | 26 | {:ok, post: post} 27 | end 28 | 29 | test "it destroys the record", %{post: post} do 30 | output = 31 | capture_io(fn -> 32 | Mix.Task.rerun("ash_ops.example.destroy_post", [to_string(post.id)]) 33 | end) 34 | 35 | assert output == "" 36 | assert {:error, error} = Ash.reload(post, authorize?: false) 37 | assert Exception.message(error) =~ "not found" 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 6 | 7 | ![Logo](https://github.com/ash-project/ash/blob/main/logos/cropped-for-header-black-text.png?raw=true#gh-light-mode-only) 8 | ![Logo](https://github.com/ash-project/ash/blob/main/logos/cropped-for-header-white-text.png?raw=true#gh-dark-mode-only) 9 | 10 | [![Ash CI](https://github.com/ash-project/ash_ops/actions/workflows/elixir.yml/badge.svg)](https://github.com/ash-project/ash_ops/actions/workflows/elixir.yml) 11 | [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 12 | [![Hex version badge](https://img.shields.io/hexpm/v/ash_ops.svg)](https://hex.pm/packages/ash_ops) 13 | [![Hexdocs badge](https://img.shields.io/badge/docs-hexdocs-purple)](https://hexdocs.pm/ash_ops) 14 | [![REUSE status](https://api.reuse.software/badge/github.com/ash-project/ash_ops)](https://api.reuse.software/info/github.com/ash-project/ash_ops) 15 | 16 | # AshOps 17 | 18 | Welcome! This is an extension for the [Ash framework](https://hexdocs.pm/ash) 19 | which exposes [actions](https://hexdocs.pm/ash/actions.html) as mix tasks on the 20 | command-line. 
21 | 22 | ## Reference 23 | 24 | - [AshOps DSL](documentation/dsls/DSL-AshOps.md) 25 | -------------------------------------------------------------------------------- /lib/mix/tasks/ash_ops.install.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | if Code.ensure_loaded?(Igniter) do 6 | defmodule Mix.Tasks.AshOps.Install do 7 | @moduledoc """ 8 | Installs AshOps into a project. Should be called with `mix igniter.install ash_ops`. 9 | """ 10 | alias Igniter.{Mix.Task, Project.Formatter} 11 | 12 | @shortdoc "Installs AshOps into a project." 13 | 14 | use Task 15 | 16 | @doc false 17 | @impl Task 18 | def igniter(igniter) do 19 | igniter 20 | |> Formatter.import_dep(:ash_ops) 21 | end 22 | end 23 | else 24 | defmodule Mix.Tasks.AshOps.Install do 25 | @moduledoc """ 26 | Installs AshOps into a project. Should be called with `mix igniter.install ash_ops`. 27 | """ 28 | @shortdoc "Installs AshOps into a project." 29 | 30 | use Mix.Task 31 | 32 | def run(_argv) do 33 | Mix.shell().error(""" 34 | The task 'ash_ops.install' requires igniter to be run. 35 | 36 | Please install igniter and try again. 37 | 38 | For more information, see: https://hexdocs.pm/igniter 39 | """) 40 | 41 | exit({:shutdown, 1}) 42 | end 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | 6 | 7 | # Change Log 8 | 9 | All notable changes to this project will be documented in this file. 10 | See [Conventional Commits](Https://conventionalcommits.org) for commit guidelines. 
11 | 12 | 13 | 14 | ## v0.2.4 (2025-04-15) 15 | 16 | 17 | 18 | 19 | ### Improvements: 20 | 21 | * encode record or records returned from generic actions 22 | 23 | ## v0.2.3 (2025-03-06) 24 | 25 | 26 | 27 | 28 | ### Bug Fixes: 29 | 30 | * Set the changeset's action when all inputs are parsed. (#3) 31 | 32 | ### Improvements: 33 | 34 | * Rename `--query` to `--filter` and add `--sort` option to list tasks. (#4) 35 | 36 | * Rename `--query` to `--filter` because that's what it is. 37 | 38 | * Add `--sort` option to list tasks. 39 | 40 | ## v0.2.2 (2025-03-06) 41 | 42 | 43 | 44 | 45 | ### Bug Fixes: 46 | 47 | * correct dependency options. 48 | 49 | ## v0.2.1 (2025-03-06) 50 | 51 | 52 | 53 | 54 | ### Bug Fixes: 55 | 56 | * git_ops generated configuration is incorrect. 57 | 58 | ### Improvements: 59 | 60 | * Add igniter installer 61 | 62 | ## v0.2.0 (2025-03-05) 63 | 64 | 65 | 66 | 67 | ### Features: 68 | 69 | * Add support for generic actions. (#5) 70 | 71 | * Add `update` mix tasks. (#4) 72 | 73 | * Add `destroy` tasks. (#3) 74 | 75 | * Add mix tasks for create actions. (#2) 76 | 77 | * Add `list` task type. (#1) 78 | 79 | ## v0.1.0 (2025-02-26) 80 | 81 | 82 | 83 | 84 | ### Features: 85 | 86 | * Add the ability to define a get task. 
87 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | import Config 6 | 7 | config :ash, 8 | allow_forbidden_field_for_relationships_by_default?: true, 9 | include_embedded_source_by_default?: false, 10 | show_keysets_for_all_actions?: false, 11 | default_page_type: :keyset, 12 | policies: [no_filter_static_forbidden_reads?: false] 13 | 14 | config :spark, 15 | formatter: [ 16 | remove_parens?: true, 17 | "Ash.Resource": [ 18 | section_order: [ 19 | :actions, 20 | :aggregates, 21 | :attributes, 22 | :calculations, 23 | :changes, 24 | :code_interface, 25 | :ets, 26 | :identities, 27 | :multitenancy, 28 | :policies, 29 | :postgres, 30 | :preparations, 31 | :pub_sub, 32 | :relationships, 33 | :resource, 34 | :validations 35 | ] 36 | ], 37 | "Ash.Domain": [ 38 | section_order: [ 39 | :authorization, 40 | :domain, 41 | :execution, 42 | :mix_tasks, 43 | :policies, 44 | :resources 45 | ] 46 | ] 47 | ] 48 | 49 | if Mix.env() in [:dev, :test] do 50 | config :git_ops, 51 | mix_project: Mix.Project.get!(), 52 | types: [types: [tidbit: [hidden?: true], important: [header: "Important Changes"]]], 53 | version_tag_prefix: "v", 54 | manage_mix_version?: true, 55 | manage_readme_version: true 56 | 57 | config :ash_ops, ash_domains: [Example] 58 | end 59 | 60 | import_config "#{config_env()}.exs" 61 | -------------------------------------------------------------------------------- /lib/ash_ops.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps do 6 | @moduledoc """ 7 | An extension for `Ash.Domain` that adds the ability expose resource actions as 8 | mix tasks. 
9 | """ 10 | 11 | @mix_tasks %Spark.Dsl.Section{ 12 | name: :mix_tasks, 13 | describe: """ 14 | Resource actions to expose as mix tasks. 15 | """, 16 | examples: [ 17 | """ 18 | mix_tasks do 19 | action Post, :publish_post, :publish 20 | create Post, :create_post, :create 21 | destroy Post, :destroy_post, :destroy 22 | get Post, :get_post, :read 23 | list Post, :list_posts, :read 24 | update Post, :update_post, :update 25 | end 26 | """ 27 | ], 28 | entities: [ 29 | __MODULE__.Entity.Action.__entity__(), 30 | __MODULE__.Entity.Create.__entity__(), 31 | __MODULE__.Entity.Destroy.__entity__(), 32 | __MODULE__.Entity.Get.__entity__(), 33 | __MODULE__.Entity.List.__entity__(), 34 | __MODULE__.Entity.Update.__entity__() 35 | ] 36 | } 37 | 38 | use Spark.Dsl.Extension, 39 | sections: [@mix_tasks], 40 | transformers: [__MODULE__.Transformer.PrepareTask], 41 | verifiers: [__MODULE__.Verifier.VerifyTask] 42 | 43 | @type entity :: 44 | __MODULE__.Entity.Action.t() 45 | | __MODULE__.Entity.Create.t() 46 | | __MODULE__.Entity.Destroy.t() 47 | | __MODULE__.Entity.Get.t() 48 | | __MODULE__.Entity.List.t() 49 | | __MODULE__.Entity.Update.t() 50 | end 51 | -------------------------------------------------------------------------------- /test/support/example/post.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule Example.Post do 6 | @moduledoc false 7 | use Ash.Resource, 8 | data_layer: Ash.DataLayer.Ets, 9 | domain: Example, 10 | authorizers: [Ash.Policy.Authorizer] 11 | 12 | actions do 13 | defaults [:read, :destroy, create: :*, update: :*] 14 | 15 | action :publish, :struct do 16 | constraints instance_of: __MODULE__ 17 | argument :id, :uuid, public?: true, allow_nil?: false 18 | argument :platform, :string, public?: true, allow_nil?: false 19 | run fn input, _ -> {:ok, %__MODULE__{id: input.arguments.id}} end 20 | end 21 | end 22 | 
23 | attributes do 24 | uuid_v7_primary_key :id 25 | attribute :title, :string, allow_nil?: false, public?: true 26 | attribute :body, :string, allow_nil?: false, public?: true 27 | attribute :slug, :string, allow_nil?: false, public?: true 28 | attribute :tenant, :string, allow_nil?: true, public?: true 29 | create_timestamp :inserted_at 30 | update_timestamp :updated_at 31 | end 32 | 33 | calculations do 34 | calculate :length, :integer, expr(string_length(body)), public?: true 35 | calculate :long, :boolean, expr(length > 10), public?: true 36 | end 37 | 38 | ets do 39 | table :posts 40 | private? true 41 | end 42 | 43 | identities do 44 | identity :unique_slug, [:slug], pre_check_with: :read 45 | end 46 | 47 | multitenancy do 48 | strategy :attribute 49 | attribute :tenant 50 | global? true 51 | end 52 | 53 | policies do 54 | policy actor_present() do 55 | authorize_if actor_attribute_equals(:is_good, true) 56 | end 57 | 58 | policy always() do 59 | authorize_if always() 60 | end 61 | end 62 | 63 | relationships do 64 | belongs_to :author, Example.Actor, public?: true 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /test/ash_ops/task/update_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.UpdateTest do 6 | @moduledoc false 7 | use ExUnit.Case, async: true 8 | import ExUnit.CaptureIO 9 | 10 | setup do 11 | ansi_enabled? = Application.get_env(:elixir, :ansi_enabled) 12 | Application.put_env(:elixir, :ansi_enabled, false) 13 | 14 | on_exit(fn -> 15 | Application.put_env(:elixir, :ansi_enabled, ansi_enabled?) 
16 | end) 17 | 18 | post = 19 | Example.create_post!(%{ 20 | title: Faker.Food.dish(), 21 | body: Faker.Food.description(), 22 | slug: Faker.Internet.slug(), 23 | tenant: Faker.Lorem.word() 24 | }) 25 | 26 | {:ok, post: post} 27 | end 28 | 29 | test "it can update the record with YAML", %{post: post} do 30 | body = 31 | post.body 32 | |> String.replace(~r/[aeiou]/i, "") 33 | 34 | yaml = "body: \"#{body}\"\n" 35 | 36 | output = 37 | capture_io(:stdio, yaml, fn -> 38 | Mix.Task.rerun("ash_ops.example.update_post", [post.id, "--input", "yaml"]) 39 | end) 40 | 41 | assert {:ok, output} = YamlElixir.read_from_string(output) 42 | assert {:ok, post} = Example.get_post(output["id"], authorize?: false) 43 | assert post.body == body 44 | end 45 | 46 | test "it can update the record with JSON", %{post: post} do 47 | body = 48 | post.body 49 | |> String.replace(~r/[aeiou]/i, "") 50 | 51 | json = Jason.encode!(%{body: body}) 52 | 53 | output = 54 | capture_io(:stdio, json, fn -> 55 | Mix.Task.rerun("ash_ops.example.update_post", [ 56 | post.id, 57 | "--input", 58 | "json", 59 | "--format", 60 | "json" 61 | ]) 62 | end) 63 | 64 | assert {:ok, output} = Jason.decode(output) 65 | assert {:ok, post} = Example.get_post(output["id"], authorize?: false) 66 | assert post.body == body 67 | end 68 | end 69 | -------------------------------------------------------------------------------- /test/ash_ops/task/create_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.CreateTest do 6 | @moduledoc false 7 | use ExUnit.Case, async: true 8 | import ExUnit.CaptureIO 9 | 10 | setup do 11 | ansi_enabled? = Application.get_env(:elixir, :ansi_enabled) 12 | Application.put_env(:elixir, :ansi_enabled, false) 13 | 14 | on_exit(fn -> 15 | Application.put_env(:elixir, :ansi_enabled, ansi_enabled?) 
16 | end) 17 | 18 | :ok 19 | end 20 | 21 | test "records can be created with YAML input" do 22 | input = %{ 23 | title: Faker.Food.dish(), 24 | body: Faker.Food.description(), 25 | slug: Faker.Internet.slug(), 26 | tenant: Faker.Lorem.word() 27 | } 28 | 29 | yaml = Enum.map_join(input, "\n", fn {name, value} -> "#{name}: #{value}" end) 30 | 31 | output = 32 | capture_io(:stdio, yaml, fn -> 33 | Mix.Task.rerun("ash_ops.example.create_post", ["--input", "yaml"]) 34 | end) 35 | 36 | assert {:ok, output} = YamlElixir.read_from_string(output) 37 | assert {:ok, post} = Example.get_post(output["id"], authorize?: false) 38 | 39 | for {key, value} <- input do 40 | assert Map.fetch!(post, key) == value 41 | end 42 | end 43 | 44 | test "records can be created with JSON input" do 45 | input = %{ 46 | title: Faker.Food.dish(), 47 | body: Faker.Food.description(), 48 | slug: Faker.Internet.slug(), 49 | tenant: Faker.Lorem.word() 50 | } 51 | 52 | json = Jason.encode!(input) 53 | 54 | output = 55 | capture_io(:stdio, json, fn -> 56 | Mix.Task.rerun("ash_ops.example.create_post", ["--input", "json", "--format", "json"]) 57 | end) 58 | 59 | assert {:ok, output} = Jason.decode(output) 60 | assert {:ok, post} = Example.get_post(output["id"], authorize?: false) 61 | 62 | for {key, value} <- input do 63 | assert Map.fetch!(post, key) == value 64 | end 65 | end 66 | end 67 | -------------------------------------------------------------------------------- /lib/ash_ops/verifier/verify_task.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Verifier.VerifyTask do 6 | @moduledoc """ 7 | A Spark DSL verifier for mix task entities. 
8 | """ 9 | use Spark.Dsl.Verifier 10 | import Spark.Dsl.Verifier 11 | 12 | alias AshOps.Info, as: AOI 13 | alias Spark.Error.DslError 14 | 15 | @doc false 16 | def verify(dsl) do 17 | dsl 18 | |> AOI.mix_tasks() 19 | |> Enum.reduce_while(:ok, fn task, :ok -> 20 | case verify_entity(task, dsl) do 21 | :ok -> {:cont, :ok} 22 | {:error, reason} -> {:halt, {:error, reason}} 23 | end 24 | end) 25 | end 26 | 27 | defp verify_entity(task, dsl) do 28 | verify_arguments(task, dsl) 29 | end 30 | 31 | defp verify_arguments(task, _dsl) when task.arguments == [], do: :ok 32 | 33 | defp verify_arguments(task, dsl) do 34 | action_arguments = 35 | task.action.arguments 36 | |> Enum.filter(& &1.public?) 37 | |> MapSet.new(& &1.name) 38 | 39 | entity_arguments = MapSet.new(task.arguments) 40 | 41 | entity_arguments 42 | |> MapSet.difference(action_arguments) 43 | |> Enum.to_list() 44 | |> case do 45 | [] -> 46 | :ok 47 | 48 | [spurious] -> 49 | {:error, 50 | DslError.exception( 51 | module: get_persisted(dsl, :module), 52 | path: [:mix_tasks, task.type, task.name, :arguments], 53 | message: """ 54 | The action `#{inspect(task.action.name)}` on the `#{inspect(task.resource)}` resource does not accept the following argument, either because it is not defined or not public: 55 | 56 | - `#{inspect(spurious)}` 57 | """ 58 | )} 59 | 60 | spurious -> 61 | {:error, 62 | DslError.exception( 63 | module: get_persisted(dsl, :module), 64 | path: [:mix_tasks, task.type, task.name, :arguments], 65 | message: """ 66 | The action `#{inspect(task.action.name)}` on the `#{inspect(task.resource)}` resource does not accept the following arguments, either because they are not defined or not public: 67 | 68 | #{Enum.map_join(spurious, "\n", &"- `#{inspect(&1)}`")} 69 | """ 70 | )} 71 | end 72 | end 73 | end 74 | -------------------------------------------------------------------------------- /lib/ash_ops/entity/create.ex: -------------------------------------------------------------------------------- 1 
| # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Entity.Create do 6 | @moduledoc """ 7 | The `mix_tasks.create` DSL entity. 8 | """ 9 | 10 | defstruct [ 11 | :__identifier__, 12 | :__spark_metadata__, 13 | :action, 14 | :description, 15 | :domain, 16 | :name, 17 | :prefix, 18 | :resource, 19 | :task_name, 20 | arguments: [], 21 | type: :create 22 | ] 23 | 24 | @type t :: %__MODULE__{ 25 | __identifier__: any, 26 | __spark_metadata__: Spark.Dsl.Entity.spark_meta(), 27 | action: atom | Ash.Resource.Actions.Create.t(), 28 | arguments: [], 29 | description: nil | String.t(), 30 | domain: module, 31 | name: atom, 32 | prefix: atom, 33 | resource: module, 34 | task_name: atom, 35 | type: :create 36 | } 37 | 38 | @doc false 39 | def __entity__ do 40 | %Spark.Dsl.Entity{ 41 | name: :create, 42 | describe: """ 43 | Generate a mix task which calls a create action and returns the created 44 | record. 45 | 46 | ## Example 47 | 48 | Defining the following `create` in your domain: 49 | 50 | ```elixir 51 | mix_tasks do 52 | create Post, :create_post, :create 53 | end 54 | ``` 55 | 56 | Will result in the following mix task being available: 57 | 58 | ```bash 59 | mix my_app.blog.create_post 60 | ``` 61 | """, 62 | target: __MODULE__, 63 | identifier: :name, 64 | args: [:resource, :name, :action], 65 | schema: [ 66 | action: [ 67 | type: :atom, 68 | required: true, 69 | doc: "The name of the create action to use" 70 | ], 71 | description: [ 72 | type: :string, 73 | required: false, 74 | doc: "Documentation to be displayed in the mix task's help section" 75 | ], 76 | name: [ 77 | type: :atom, 78 | required: true, 79 | doc: "The name of the mix task to generate" 80 | ], 81 | prefix: [ 82 | type: :atom, 83 | required: false, 84 | doc: 85 | "The prefix to use for the mix task name (ie the part before the first \".\"). 
Defaults to the `otp_app` setting of the domain" 86 | ], 87 | resource: [ 88 | type: {:spark, Ash.Resource}, 89 | required: true, 90 | doc: "The resource whose actions to use" 91 | ] 92 | ] 93 | } 94 | end 95 | end 96 | -------------------------------------------------------------------------------- /lib/ash_ops/entity/list.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Entity.List do 6 | @moduledoc """ 7 | The `mix_tasks.list` DSL entity. 8 | """ 9 | 10 | defstruct [ 11 | :__identifier__, 12 | :__spark_metadata__, 13 | :action, 14 | :description, 15 | :domain, 16 | :name, 17 | :prefix, 18 | :resource, 19 | :task_name, 20 | arguments: [], 21 | type: :list 22 | ] 23 | 24 | @type t :: %__MODULE__{ 25 | __identifier__: any, 26 | __spark_metadata__: Spark.Dsl.Entity.spark_meta(), 27 | action: atom | Ash.Resource.Actions.Read.t(), 28 | arguments: [atom], 29 | description: nil | String.t(), 30 | domain: module, 31 | name: atom, 32 | prefix: atom, 33 | resource: module, 34 | task_name: atom, 35 | type: :list 36 | } 37 | 38 | @doc false 39 | def __entity__ do 40 | %Spark.Dsl.Entity{ 41 | name: :list, 42 | describe: """ 43 | Generate a mix task which calls a read action and returns any matching records. 
44 | 45 | ## Example 46 | 47 | Defining the following `list` in your domain: 48 | 49 | ```elixir 50 | mix_tasks do 51 | list Post, :list_posts, :read 52 | end 53 | ``` 54 | 55 | Will result in the following mix task being available: 56 | 57 | ```bash 58 | mix my_app.blog.list_posts 59 | ``` 60 | """, 61 | target: __MODULE__, 62 | identifier: :name, 63 | args: [:resource, :name, :action], 64 | schema: [ 65 | action: [ 66 | type: :atom, 67 | required: true, 68 | doc: "The name of the read action to use" 69 | ], 70 | arguments: [ 71 | type: {:wrap_list, :atom}, 72 | required: false, 73 | doc: 74 | "A list of action arguments which should be taken as positional arguments on the command line" 75 | ], 76 | description: [ 77 | type: :string, 78 | required: false, 79 | doc: "Documentation to be displayed in the mix task's help section" 80 | ], 81 | name: [ 82 | type: :atom, 83 | required: true, 84 | doc: "The name of the mix task to generate" 85 | ], 86 | prefix: [ 87 | type: :atom, 88 | required: false, 89 | doc: 90 | "The prefix to use for the mix task name (ie the part before the first \".\"). Defaults to the `otp_app` setting of the domain" 91 | ], 92 | resource: [ 93 | type: {:spark, Ash.Resource}, 94 | required: true, 95 | doc: "The resource whose action to use" 96 | ] 97 | ] 98 | } 99 | end 100 | end 101 | -------------------------------------------------------------------------------- /lib/ash_ops/entity/action.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Entity.Action do 6 | @moduledoc """ 7 | The `mix_tasks.action` DSL entity. 
8 | """ 9 | 10 | defstruct [ 11 | :__identifier__, 12 | :__spark_metadata__, 13 | :action, 14 | :description, 15 | :domain, 16 | :name, 17 | :prefix, 18 | :resource, 19 | :task_name, 20 | arguments: [], 21 | type: :action 22 | ] 23 | 24 | @type t :: %__MODULE__{ 25 | __identifier__: any, 26 | __spark_metadata__: Spark.Dsl.Entity.spark_meta(), 27 | action: atom | Ash.Resource.Actions.Action.t(), 28 | arguments: [], 29 | description: nil | String.t(), 30 | domain: module, 31 | name: atom, 32 | prefix: atom, 33 | resource: module, 34 | task_name: atom, 35 | type: :action 36 | } 37 | 38 | @doc false 39 | def __entity__ do 40 | %Spark.Dsl.Entity{ 41 | name: :action, 42 | describe: """ 43 | Generate a mix task which calls a generic action and returns the 44 | result. 45 | 46 | ## Example 47 | 48 | Defining the following `action` in your domain: 49 | 50 | ```elixir 51 | mix_tasks do 52 | action Post, :publish_post, :publish, arguments: [:id, :platform] 53 | end 54 | ``` 55 | 56 | Will result in the following mix task being available: 57 | 58 | ```bash 59 | mix my_app.blog.publish_post 60 | ``` 61 | """, 62 | target: __MODULE__, 63 | identifier: :name, 64 | args: [:resource, :name, :action], 65 | schema: [ 66 | action: [ 67 | type: :atom, 68 | required: true, 69 | doc: "The name of the action to use" 70 | ], 71 | arguments: [ 72 | type: {:wrap_list, :atom}, 73 | required: false, 74 | default: [], 75 | doc: 76 | "A list of action arguments which should be taken as positional arguments on the command line" 77 | ], 78 | description: [ 79 | type: :string, 80 | required: false, 81 | doc: "Documentation to be displayed in the mix task's help section" 82 | ], 83 | name: [ 84 | type: :atom, 85 | required: true, 86 | doc: "The name of the mix task to generate" 87 | ], 88 | prefix: [ 89 | type: :atom, 90 | required: false, 91 | doc: 92 | "The prefix to use for the mix task name (ie the part before the first \".\"). 
Defaults to the `otp_app` setting of the domain" 93 | ], 94 | resource: [ 95 | type: {:spark, Ash.Resource}, 96 | required: true, 97 | doc: "The resource whose actions to use" 98 | ] 99 | ] 100 | } 101 | end 102 | end 103 | -------------------------------------------------------------------------------- /lib/ash_ops/entity/get.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Entity.Get do 6 | @moduledoc """ 7 | The `mix_tasks.get` DSL entity. 8 | """ 9 | 10 | defstruct [ 11 | :__identifier__, 12 | :__spark_metadata__, 13 | :action, 14 | :description, 15 | :domain, 16 | :name, 17 | :prefix, 18 | :resource, 19 | :task_name, 20 | arguments: [], 21 | type: :get 22 | ] 23 | 24 | @type t :: %__MODULE__{ 25 | __identifier__: any, 26 | __spark_metadata__: Spark.Dsl.Entity.spark_meta(), 27 | action: atom | Ash.Resource.Actions.Read.t(), 28 | arguments: [atom], 29 | description: nil | String.t(), 30 | domain: module, 31 | name: atom, 32 | prefix: atom, 33 | resource: module, 34 | task_name: atom, 35 | type: :get 36 | } 37 | 38 | @doc false 39 | def __entity__ do 40 | %Spark.Dsl.Entity{ 41 | name: :get, 42 | describe: """ 43 | Generate a mix task which calls a read action and returns a single record 44 | by primary key or identity. 
45 | 46 | ## Example 47 | 48 | Defining the following `get` in your domain: 49 | 50 | ```elixir 51 | mix_tasks do 52 | get Post, :get_post, :read 53 | end 54 | ``` 55 | 56 | Will result in the following mix task being available: 57 | 58 | ```bash 59 | mix my_app.blog.get_post "01953abc-c4e9-7661-a79a-243b0d982ab7" 60 | title: Example blog post 61 | body: This is the example blog post 62 | ``` 63 | """, 64 | target: __MODULE__, 65 | identifier: :name, 66 | args: [:resource, :name, :action], 67 | schema: [ 68 | action: [ 69 | type: :atom, 70 | required: true, 71 | doc: "The name of the read action to use" 72 | ], 73 | arguments: [ 74 | type: {:wrap_list, :atom}, 75 | required: false, 76 | default: [], 77 | doc: 78 | "A list of action arguments which should be taken as positional arguments on the command line" 79 | ], 80 | description: [ 81 | type: :string, 82 | required: false, 83 | doc: "Documentation to be displayed in the mix task's help section" 84 | ], 85 | name: [ 86 | type: :atom, 87 | required: true, 88 | doc: "The name of the mix task to generate" 89 | ], 90 | prefix: [ 91 | type: :atom, 92 | required: false, 93 | doc: 94 | "The prefix to use for the mix task name (ie the part before the first \".\"). Defaults to the `otp_app` setting of the domain" 95 | ], 96 | resource: [ 97 | type: {:spark, Ash.Resource}, 98 | required: true, 99 | doc: "The resource whose action to use" 100 | ] 101 | ] 102 | } 103 | end 104 | end 105 | -------------------------------------------------------------------------------- /lib/ash_ops/task/get.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.Get do 6 | @moduledoc """ 7 | Provides the implementation of the `get` mix task. 8 | 9 | This should only ever be called from the mix task itself. 
10 | """ 11 | alias Ash.Query 12 | alias AshOps.Task.ArgSchema 13 | 14 | import AshOps.Task.Common 15 | require Query 16 | 17 | @doc false 18 | def run(argv, task, arg_schema) do 19 | with {:ok, cfg} <- ArgSchema.parse(arg_schema, argv), 20 | {:ok, actor} <- load_actor(cfg[:actor], cfg[:tenant]), 21 | cfg <- Map.put(cfg, :actor, actor), 22 | {:ok, record} <- load_record(task, cfg), 23 | {:ok, output} <- serialise_record(record, task.resource, cfg) do 24 | Mix.shell().info(output) 25 | 26 | :ok 27 | else 28 | {:error, reason} -> handle_error({:error, reason}) 29 | end 30 | end 31 | 32 | defp load_record(task, cfg) do 33 | opts = 34 | cfg 35 | |> Map.take([:load, :actor, :tenant]) 36 | |> Map.put(:domain, task.domain) 37 | |> Map.put(:not_found_error?, true) 38 | |> Map.put(:authorize_with, :error) 39 | |> Enum.to_list() 40 | 41 | with {:ok, field} <- identity_or_pk_field(task.resource, cfg) do 42 | task.resource 43 | |> Query.new() 44 | |> Query.for_read(task.action.name) 45 | |> Query.filter_input(%{field => %{"eq" => cfg.positional_arguments.id}}) 46 | |> Ash.read_one(opts) 47 | end 48 | end 49 | 50 | @doc false 51 | defmacro __using__(opts) do 52 | quote generated: true do 53 | @task unquote(opts[:task]) 54 | @arg_schema @task 55 | |> ArgSchema.default() 56 | |> ArgSchema.prepend_positional(:id, "A unique identifier for the record") 57 | |> ArgSchema.add_switch( 58 | :identity, 59 | :string, 60 | [ 61 | type: {:custom, AshOps.Task.Types, :identity, [@task]}, 62 | required: false, 63 | doc: "The identity to use to retrieve the record." 
64 | ], 65 | [:i] 66 | ) 67 | 68 | @shortdoc "Get a single `#{inspect(@task.resource)}` record using the `#{@task.action.name}` action" 69 | 70 | @moduledoc """ 71 | #{@shortdoc} 72 | 73 | #{if @task.description, do: "#{@task.description}\n\n"} 74 | #{if @task.action.description, do: """ 75 | ## Action 76 | 77 | #{@task.action.description} 78 | 79 | """} 80 | ## Usage 81 | 82 | Records are looked up by their primary key unless the `--identity` option 83 | is used. The identity must not be composite (ie only contain a single 84 | field). 85 | #{ArgSchema.usage(@task, @arg_schema)} 86 | """ 87 | use Mix.Task 88 | 89 | @requirements ["app.start"] 90 | 91 | @impl Mix.Task 92 | def run(args) do 93 | unquote(__MODULE__).run(args, @task, @arg_schema) 94 | end 95 | end 96 | end 97 | end 98 | -------------------------------------------------------------------------------- /lib/ash_ops/task/destroy.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.Destroy do 6 | @moduledoc """ 7 | Provides the implementation of the `destroy` mix task. 8 | 9 | This should only ever be called from the mix task itself. 
10 | """ 11 | alias Ash.Query 12 | alias AshOps.Task.ArgSchema 13 | require Query 14 | import AshOps.Task.Common 15 | 16 | @doc false 17 | def run(argv, task, arg_schema) do 18 | with {:ok, cfg} <- ArgSchema.parse(arg_schema, argv), 19 | {:ok, actor} <- load_actor(cfg[:actor], cfg[:tenant]), 20 | cfg <- Map.put(cfg, :actor, actor), 21 | :ok <- destroy_record(task, cfg) do 22 | :ok 23 | else 24 | {:error, reason} -> handle_error(reason) 25 | end 26 | end 27 | 28 | defp destroy_record(task, cfg) do 29 | opts = 30 | cfg 31 | |> Map.take([:load, :actor, :tenant]) 32 | |> Map.put(:domain, task.domain) 33 | |> Enum.to_list() 34 | 35 | with {:ok, field} <- identity_or_pk_field(task.resource, cfg) do 36 | task.resource 37 | |> Query.new() 38 | |> Query.filter_input(%{field => %{"eq" => cfg.positional_arguments.id}}) 39 | |> Query.for_read(task.read_action.name) 40 | |> Ash.bulk_destroy(task.action.name, %{}, opts) 41 | |> case do 42 | %{status: :success} -> :ok 43 | %{errors: errors} -> {:error, Ash.Error.to_error(errors)} 44 | end 45 | end 46 | end 47 | 48 | @doc false 49 | defmacro __using__(opts) do 50 | quote generated: true do 51 | @task unquote(opts[:task]) 52 | @arg_schema @task 53 | |> ArgSchema.default() 54 | |> ArgSchema.prepend_positional(:id, "A unique identifier for the record") 55 | |> ArgSchema.add_switch( 56 | :identity, 57 | :string, 58 | [ 59 | type: {:custom, AshOps.Task.Types, :identity, [@task]}, 60 | required: false, 61 | doc: "The identity to use to retrieve the record." 62 | ], 63 | [:i] 64 | ) 65 | 66 | @shortdoc "Destroy a single `#{inspect(@task.resource)}` record using the `#{@task.action.name}` action" 67 | 68 | @moduledoc """ 69 | #{@shortdoc} 70 | 71 | #{if @task.description, do: "#{@task.description}\n\n"} 72 | #{if @task.action.description, do: """ 73 | ## Action 74 | 75 | #{@task.action.description} 76 | 77 | """} 78 | ## Usage 79 | 80 | Records are looked up by their primary key unless the `--identity` option 81 | is used. 
The identity must not be composite (ie only contain a single 82 | field). 83 | 84 | Matching records are destroyed. 85 | #{ArgSchema.usage(@task, @arg_schema)} 86 | """ 87 | use Mix.Task 88 | 89 | @requirements ["app.start"] 90 | 91 | @impl Mix.Task 92 | def run(args) do 93 | unquote(__MODULE__).run(args, @task, @arg_schema) 94 | end 95 | end 96 | end 97 | end 98 | -------------------------------------------------------------------------------- /src/ash_ops_query.peg: -------------------------------------------------------------------------------- 1 | query <- expr; 2 | 3 | expr <- lhs:expr_single rhs:(space? op space? expr_single)* 4 | ` 5 | Lhs = proplists:get_value(lhs, Node), 6 | Rhs = proplists:get_value(rhs, Node), 7 | Rhs2 = lists:flatmap(fun([_, Op, _, E]) -> [Op, E] end, Rhs), 8 | [Lhs | Rhs2] 9 | `; 10 | expr_single <- array / braced / function / literal / path; 11 | braced <- '(' space? e:expr space? ')' 12 | ` 13 | proplists:get_value(e, Node) 14 | `; 15 | 16 | function <- name:ident '(' space? args:function_args? space? ')' 17 | ` 18 | Name = proplists:get_value(name, Node), 19 | Args = proplists:get_value(args, Node), 20 | {function, Name, Args} 21 | `; 22 | 23 | function_args <- head:(expr space? 
"," space?)* tail:expr 24 | ` 25 | Head = lists:flatmap(fun([E, _, _, _]) -> E end, proplists:get_value(head, Node)), 26 | Tail = proplists:get_value(tail, Node), 27 | lists:append(Head, Tail) 28 | `; 29 | 30 | 31 | op <- op_and / op_or / op_eq / op_neq / op_concat / op_gte / op_gt / op_lte / op_lt / op_in / op_mul / op_div / op_add / op_sub; 32 | op_mul <- '*' / 'times' `{op, '*', left, 8}`; 33 | op_div <- '/' / 'div' `{op, '/', left, 8}`; 34 | op_add <- '+' / 'plus' `{op, '+', left, 7}`; 35 | op_sub <- '-' / 'minus' `{op, '-', left, 7}`; 36 | op_concat <- '<>' / 'concat' `{op, '<>', right, 6}`; 37 | op_in <- 'in' `{op, in, left, 5}`; 38 | op_gt <- '>' / 'gt' / 'greater_than' `{op, '>', left, 4}`; 39 | op_gte <- '>=' / 'gte' / 'greater_than_or_equal' `{op, '>=', left, 4}`; 40 | op_lt <- '<' / 'lt' / 'less_than' `{op, '<', left, 4}`; 41 | op_lte <- '<=' / 'lte' / 'less_than_or_equal' `{op, '<=', left, 4}`; 42 | op_eq <- '==' / 'eq' / 'equals' `{op, '==', left, 3}`; 43 | op_neq <- '!=' / 'not_eq' / 'not_equals' `{op, '!=', left, 3}`; 44 | op_and <- '&&' / 'and' `{op, '&&', left, 2}`; 45 | op_or <- '||' / 'or' `{op, '||', left, 1}`; 46 | 47 | path <- head:ident tail:('.' path_element)* 48 | ` 49 | Head = proplists:get_value(head, Node), 50 | Tail = lists:map(fun([_, E]) -> E end, proplists:get_value(tail, Node)), 51 | {path, [Head | Tail]} 52 | `; 53 | 54 | path_element <- ident; 55 | 56 | array <- '[' space? elements:array_elements? space? ']' 57 | ` 58 | proplists:get_value(elements, Node) 59 | `; 60 | 61 | array_elements <- head:(expr space? "," space?)* tail:expr 62 | ` 63 | Head = lists:flatmap(fun([E, _, _, _]) -> E end, proplists:get_value(head, Node)), 64 | Tail = proplists:get_value(tail, Node), 65 | {array, lists:append(Head, Tail)} 66 | `; 67 | 68 | literal <- boolean / float / integer / string; 69 | boolean <- boolean_true / boolean_false; 70 | boolean_true <- 'true' `{boolean, true}`; 71 | boolean_false <- 'false' `{boolean, false}`; 72 | integer <- '-'? 
('0' / ([1-9] [0-9]*)) 73 | ` 74 | Number = iolist_to_binary(Node), 75 | {integer, binary_to_integer(Number)} 76 | `; 77 | float <- '-'? ([0-9]+ '.' [0-9]+) 78 | ` 79 | Number = iolist_to_binary(Node), 80 | {float, binary_to_float(Number)} 81 | `; 82 | 83 | string <- string_double / string_single; 84 | string_double <- '"' chars:(!'"' ("\\\\" / '\\"' / .))* '"' `{string, iolist_to_binary(proplists:get_value(chars, Node))}`; 85 | string_single <- "'" chars:(!"'" ("\\\\" / "\\'" / .))* "'" `{string, iolist_to_binary(proplists:get_value(chars, Node))}`; 86 | 87 | ident <- [a-zA-Z_] [a-zA-Z0-9_]* `{ident, iolist_to_binary(Node)}`; 88 | space <- [ \t\n\s\r]* ~; 89 | -------------------------------------------------------------------------------- /lib/ash_ops/entity/update.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Entity.Update do 6 | @moduledoc """ 7 | The `mix_tasks.update` DSL entity. 8 | """ 9 | 10 | defstruct [ 11 | :__identifier__, 12 | :__spark_metadata__, 13 | :action, 14 | :description, 15 | :domain, 16 | :name, 17 | :prefix, 18 | :read_action, 19 | :resource, 20 | :task_name, 21 | arguments: [], 22 | type: :update 23 | ] 24 | 25 | @type t :: %__MODULE__{ 26 | __identifier__: any, 27 | __spark_metadata__: Spark.Dsl.Entity.spark_meta(), 28 | action: atom | Ash.Resource.Actions.Update.t(), 29 | arguments: [atom], 30 | description: nil | String.t(), 31 | domain: module, 32 | name: atom, 33 | prefix: atom, 34 | read_action: nil | atom | Ash.Resource.Actions.Read.t(), 35 | resource: module, 36 | task_name: atom, 37 | type: :update 38 | } 39 | 40 | @doc false 41 | def __entity__ do 42 | %Spark.Dsl.Entity{ 43 | name: :update, 44 | describe: """ 45 | Generate a mix task which calls an update action and updates a single record 46 | by primary key or identity. 
47 | 48 | ## Example 49 | 50 | Defining the following `update` in your domain: 51 | 52 | ```elixir 53 | mix_tasks do 54 | update Post, :update_post, :update 55 | end 56 | ``` 57 | 58 | Will result in the following mix task being available: 59 | 60 | ```bash 61 | mix my_app.blog.update_post "01953abc-c4e9-7661-a79a-243b0d982ab7" 62 | ``` 63 | """, 64 | target: __MODULE__, 65 | identifier: :name, 66 | args: [:resource, :name, :action], 67 | schema: [ 68 | action: [ 69 | type: :atom, 70 | required: true, 71 | doc: "The name of the update action to use" 72 | ], 73 | arguments: [ 74 | type: {:wrap_list, :atom}, 75 | required: false, 76 | default: [], 77 | doc: 78 | "A list of action arguments which should be taken as positional arguments on the command line" 79 | ], 80 | description: [ 81 | type: :string, 82 | required: false, 83 | doc: "Documentation to be displayed in the mix task's help section" 84 | ], 85 | name: [ 86 | type: :atom, 87 | required: true, 88 | doc: "The name of the mix task to generate" 89 | ], 90 | prefix: [ 91 | type: :atom, 92 | required: false, 93 | doc: 94 | "The prefix to use for the mix task name (ie the part before the first \".\"). Defaults to the `otp_app` setting of the domain" 95 | ], 96 | read_action: [ 97 | type: :atom, 98 | required: false, 99 | doc: 100 | "The read action to use to query for matching records to update. Defaults to the primary read action." 101 | ], 102 | resource: [ 103 | type: {:spark, Ash.Resource}, 104 | required: true, 105 | doc: "The resource whose action to use" 106 | ] 107 | ] 108 | } 109 | end 110 | end 111 | -------------------------------------------------------------------------------- /lib/ash_ops/entity/destroy.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Entity.Destroy do 6 | @moduledoc """ 7 | The `mix_tasks.destroy` DSL entity. 
8 | """ 9 | 10 | defstruct [ 11 | :__identifier__, 12 | :__spark_metadata__, 13 | :action, 14 | :description, 15 | :domain, 16 | :name, 17 | :prefix, 18 | :read_action, 19 | :resource, 20 | :task_name, 21 | arguments: [], 22 | type: :destroy 23 | ] 24 | 25 | @type t :: %__MODULE__{ 26 | __identifier__: any, 27 | __spark_metadata__: Spark.Dsl.Entity.spark_meta(), 28 | action: atom | Ash.Resource.Actions.Destroy.t(), 29 | arguments: [atom], 30 | description: nil | String.t(), 31 | domain: module, 32 | name: atom, 33 | prefix: atom, 34 | read_action: nil | atom | Ash.Resource.Actions.Read.t(), 35 | resource: module, 36 | task_name: atom, 37 | type: :destroy 38 | } 39 | 40 | @doc false 41 | def __entity__ do 42 | %Spark.Dsl.Entity{ 43 | name: :destroy, 44 | describe: """ 45 | Generate a mix task which calls a destroy action and removes a single record 46 | by primary key or identity. 47 | 48 | ## Example 49 | 50 | Defining the following `destroy` in your domain: 51 | 52 | ```elixir 53 | mix_tasks do 54 | destroy Post, :destroy_post, :destroy 55 | end 56 | ``` 57 | 58 | Will result in the following mix task being available: 59 | 60 | ```bash 61 | mix my_app.blog.destroy_post "01953abc-c4e9-7661-a79a-243b0d982ab7" 62 | status: ok 63 | ``` 64 | """, 65 | target: __MODULE__, 66 | identifier: :name, 67 | args: [:resource, :name, :action], 68 | schema: [ 69 | action: [ 70 | type: :atom, 71 | required: true, 72 | doc: "The name of the destroy action to use" 73 | ], 74 | arguments: [ 75 | type: {:wrap_list, :atom}, 76 | required: false, 77 | default: [], 78 | doc: 79 | "A list of action arguments which should be taken as positional arguments on the command line" 80 | ], 81 | description: [ 82 | type: :string, 83 | required: false, 84 | doc: "Documentation to be displayed in the mix task's help section" 85 | ], 86 | name: [ 87 | type: :atom, 88 | required: true, 89 | doc: "The name of the mix task to generate" 90 | ], 91 | prefix: [ 92 | type: :atom, 93 | required: false, 94 | 
doc: 95 | "The prefix to use for the mix task name (ie the part before the first \".\"). Defaults to the `otp_app` setting of the domain" 96 | ], 97 | read_action: [ 98 | type: :atom, 99 | required: false, 100 | doc: 101 | "The read action to use to query for matching records to destroy. Defaults to the primary read action." 102 | ], 103 | resource: [ 104 | type: {:spark, Ash.Resource}, 105 | required: true, 106 | doc: "The resource whose action to use" 107 | ] 108 | ] 109 | } 110 | end 111 | end 112 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.MixProject do 6 | use Mix.Project 7 | 8 | @moduledoc "An Ash extension which generates mix tasks for Ash actions" 9 | @version "0.2.4" 10 | def project do 11 | [ 12 | aliases: aliases(), 13 | app: :ash_ops, 14 | compilers: compilers(Mix.env()), 15 | consolidate_protocols: Mix.env() != :dev, 16 | deps: deps(), 17 | description: @moduledoc, 18 | dialyzer: [plt_add_apps: [:mix]], 19 | docs: docs(), 20 | elixir: "~> 1.18", 21 | elixirc_paths: elixirc_paths(Mix.env()), 22 | package: package(), 23 | start_permanent: Mix.env() == :prod, 24 | version: @version 25 | ] 26 | end 27 | 28 | defp package do 29 | [ 30 | maintainers: [ 31 | "James Harton " 32 | ], 33 | licenses: ["MIT"], 34 | links: %{ 35 | "GitHub" => "https://github.com/ash-project/ash_ops", 36 | "Changelog" => "https://github.com/ash-project/ash_ops/blob/main/CHANGELOG.md", 37 | "Discord" => "https://discord.gg/HTHRaaVPUc", 38 | "Website" => "https://ash-hq.org", 39 | "Forum" => "https://elixirforum.com/c/elixir-framework-forums/ash-framework-forum", 40 | "REUSE Compliance" => "https://api.reuse.software/info/github.com/ash-project/ash_ops" 41 | }, 42 | source_url: "https://github.com/ash-project/ash_ops", 43 | files: ~w[lib 
src .formatter.exs mix.exs README* LICENSE* CHANGELOG* documentation] 44 | ] 45 | end 46 | 47 | def application do 48 | [ 49 | extra_applications: [:logger] 50 | ] 51 | end 52 | 53 | defp docs do 54 | [ 55 | main: "readme", 56 | extras: ["README.md", "CHANGELOG.md", "documentation/dsls/DSL-AshOps.md"], 57 | filter_modules: ~r/^Elixir\.AshOps/ 58 | ] 59 | end 60 | 61 | # Run "mix help deps" to learn about dependencies. 62 | defp deps do 63 | [ 64 | {:ash, "~> 3.0"}, 65 | {:jason, "~> 1.0"}, 66 | {:spark, "~> 2.0"}, 67 | {:splode, "~> 0.2"}, 68 | {:yaml_elixir, "~> 2.11"}, 69 | {:ymlr, "~> 5.0"}, 70 | {:credo, "~> 1.0", only: [:dev, :test], runtime: false}, 71 | {:dialyxir, "~> 1.0", only: [:dev, :test], runtime: false}, 72 | {:doctor, "~> 0.22", only: [:dev, :test], runtime: false}, 73 | {:ex_check, "~> 0.16", only: [:dev, :test], runtime: false}, 74 | {:ex_doc, "~> 0.37", only: [:dev, :test], runtime: false}, 75 | {:faker, "~> 0.18", only: [:dev, :test]}, 76 | {:git_ops, "~> 2.0", only: [:dev, :test], runtime: false}, 77 | {:igniter, "~> 0.5", only: [:dev, :test], optional: true}, 78 | {:neotoma_compiler, "~> 0.1", only: [:dev, :test], runtime: false}, 79 | {:mix_audit, "~> 2.0", only: [:dev, :test], runtime: false}, 80 | {:simple_sat, "~> 0.1", only: [:dev, :test]}, 81 | {:sobelow, "~> 0.13", only: [:dev, :test], runtime: false}, 82 | {:sourceror, "~> 1.7", only: [:dev, :test], optional: true} 83 | ] 84 | end 85 | 86 | defp aliases do 87 | [ 88 | "spark.formatter": "spark.formatter --extensions AshOps", 89 | "spark.cheat_sheets": "spark.cheat_sheets --extensions AshOps", 90 | docs: ["spark.cheat_sheets", "docs"], 91 | credo: "credo --strict" 92 | ] 93 | end 94 | 95 | defp elixirc_paths(env) when env in [:dev, :test], do: ["lib", "test/support"] 96 | defp elixirc_paths(_), do: ["lib"] 97 | 98 | defp compilers(env) when env in ~w[dev test]a, do: [:neotoma | Mix.compilers()] 99 | defp compilers(_env), do: Mix.compilers() 100 | end 101 | 
-------------------------------------------------------------------------------- /lib/ash_ops/task/action.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.Action do 6 | @moduledoc """ 7 | Provides the implementation of the `action` mix task. 8 | 9 | This should only ever be called from the mix task itself. 10 | """ 11 | alias Ash.{ActionInput, Resource.Info} 12 | alias AshOps.Task.ArgSchema 13 | import AshOps.Task.Common 14 | 15 | @doc false 16 | def run(argv, task, arg_schema) do 17 | with {:ok, cfg} <- ArgSchema.parse(arg_schema, argv), 18 | {:ok, actor} <- load_actor(cfg[:actor], cfg[:tenant]), 19 | cfg <- Map.put(cfg, :actor, actor), 20 | {:ok, result} <- run_action(task, cfg), 21 | {:ok, result} <- maybe_load(result, task, cfg), 22 | {:ok, output} <- serialise_result(result, cfg) do 23 | Mix.shell().info(output) 24 | 25 | :ok 26 | else 27 | {:error, reason} -> handle_error({:error, reason}) 28 | end 29 | end 30 | 31 | defp maybe_load(result, task, cfg) do 32 | if record_or_records?(result) do 33 | {load, opts} = 34 | cfg 35 | |> Map.take([:load, :actor, :tenant]) 36 | |> Map.put(:domain, task.domain) 37 | |> Keyword.new() 38 | |> Keyword.pop(:load) 39 | 40 | if load == [] do 41 | {:ok, result} 42 | else 43 | Ash.load(result, load, opts) 44 | end 45 | else 46 | {:ok, result} 47 | end 48 | end 49 | 50 | defp record_or_records?([%struct{} | _]) do 51 | Info.resource?(struct) 52 | end 53 | 54 | defp record_or_records?(%struct{}) do 55 | Info.resource?(struct) 56 | end 57 | 58 | defp record_or_records?(_), do: false 59 | 60 | defp run_action(task, cfg) do 61 | args = 62 | cfg 63 | |> Map.get(:positional_arguments, %{}) 64 | 65 | opts = 66 | cfg 67 | |> Map.take([:load, :actor, :tenant]) 68 | |> Map.put(:domain, task.domain) 69 | |> Enum.to_list() 70 | 71 | task.resource 72 | |> 
ActionInput.for_action(task.action.name, args) 73 | |> Ash.run_action(opts) 74 | |> case do 75 | :ok -> {:ok, :ok} 76 | {:ok, result} -> {:ok, result} 77 | {:error, reason} -> {:error, reason} 78 | end 79 | end 80 | 81 | defp serialise_result(result, cfg) do 82 | if record_or_records?(result) do 83 | if is_list(result) do 84 | serialise_records(result, hd(result).__struct__, cfg) 85 | else 86 | serialise_record(result, result.__struct__, cfg) 87 | end 88 | else 89 | serialise_generic_result(result, cfg) 90 | end 91 | end 92 | 93 | defp serialise_generic_result(result, cfg) when cfg.format == :yaml do 94 | result 95 | |> Ymlr.document() 96 | |> case do 97 | {:ok, yaml} -> {:ok, String.replace_leading(yaml, "---\n", "")} 98 | {:error, reason} -> {:error, reason} 99 | end 100 | end 101 | 102 | defp serialise_generic_result(result, cfg) when cfg.format == :json do 103 | result 104 | |> Jason.encode(pretty: true) 105 | end 106 | 107 | @doc false 108 | defmacro __using__(opts) do 109 | quote generated: true do 110 | @task unquote(opts[:task]) 111 | @arg_schema ArgSchema.default(@task) 112 | 113 | @shortdoc "Run the `#{@task.action.name}` action on the `#{inspect(@task.resource)}` resource." 
114 | 115 | @moduledoc """ 116 | #{@shortdoc} 117 | 118 | 119 | #{if @task.description, do: "#{@task.description}\n\n"} 120 | #{if @task.action.description, do: """ 121 | ## Action 122 | 123 | #{@task.action.description} 124 | 125 | """} 126 | ## Usage 127 | 128 | #{ArgSchema.usage(@task, @arg_schema)} 129 | """ 130 | use Mix.Task 131 | 132 | @requirements ["app.start"] 133 | 134 | @impl Mix.Task 135 | def run(args) do 136 | unquote(__MODULE__).run(args, @task, @arg_schema) 137 | end 138 | end 139 | end 140 | end 141 | -------------------------------------------------------------------------------- /test/ash_ops/task/list_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.ListTest do 6 | @moduledoc false 7 | use ExUnit.Case, async: true 8 | import ExUnit.CaptureIO 9 | 10 | setup do 11 | ansi_enabled? = Application.get_env(:elixir, :ansi_enabled) 12 | Application.put_env(:elixir, :ansi_enabled, false) 13 | 14 | on_exit(fn -> 15 | Application.put_env(:elixir, :ansi_enabled, ansi_enabled?) 
16 | end) 17 | 18 | posts = 19 | 1..3 20 | |> Enum.map(fn i -> 21 | Example.create_post!(%{ 22 | title: "#{i}: #{Faker.Food.dish()}", 23 | body: Faker.Food.description(), 24 | slug: Faker.Internet.slug(), 25 | tenant: Faker.Lorem.word() 26 | }) 27 | end) 28 | 29 | {:ok, posts: posts} 30 | end 31 | 32 | test "all records are retrieved by default", %{posts: [post0, post1, post2]} do 33 | output = 34 | capture_io(fn -> 35 | Mix.Task.rerun("ash_ops.example.list_posts") 36 | end) 37 | 38 | assert output =~ ~r/id: #{post0.id}\n/m 39 | assert output =~ ~r/id: #{post1.id}\n/m 40 | assert output =~ ~r/id: #{post2.id}\n/m 41 | end 42 | 43 | test "records can be filtered by a filter argument", %{posts: [post0, post1, post2]} do 44 | output = 45 | capture_io(fn -> 46 | Mix.Task.rerun("ash_ops.example.list_posts", ["--filter", "id == '#{post1.id}'"]) 47 | end) 48 | 49 | refute output =~ ~r/id: #{post0.id}\n/m 50 | assert output =~ ~r/id: #{post1.id}\n/m 51 | refute output =~ ~r/id: #{post2.id}\n/m 52 | end 53 | 54 | test "records can be filtered by a filter on STDIN", %{posts: [post0, post1, post2]} do 55 | output = 56 | capture_io(:stdio, "id == '#{post1.id}'", fn -> 57 | Mix.Task.rerun("ash_ops.example.list_posts", ["--filter-stdin"]) 58 | end) 59 | 60 | refute output =~ ~r/id: #{post0.id}\n/m 61 | assert output =~ ~r/id: #{post1.id}\n/m 62 | refute output =~ ~r/id: #{post2.id}\n/m 63 | end 64 | 65 | test "an offset can be applied", %{posts: [post0, post1, post2]} do 66 | output = 67 | capture_io(fn -> 68 | Mix.Task.rerun("ash_ops.example.list_posts", ["--offset", "2"]) 69 | end) 70 | 71 | refute output =~ ~r/id: #{post0.id}\n/m 72 | refute output =~ ~r/id: #{post1.id}\n/m 73 | assert output =~ ~r/id: #{post2.id}\n/m 74 | end 75 | 76 | test "a limit can be applied", %{posts: [post0, post1, post2]} do 77 | output = 78 | capture_io(fn -> 79 | Mix.Task.rerun("ash_ops.example.list_posts", ["--limit", "2"]) 80 | end) 81 | 82 | assert output =~ ~r/id: #{post0.id}\n/m 83 | assert 
output =~ ~r/id: #{post1.id}\n/m 84 | refute output =~ ~r/id: #{post2.id}\n/m 85 | end 86 | 87 | test "a sort can be applied", %{posts: [post0, post1, post2]} do 88 | output = 89 | capture_io(fn -> 90 | Mix.Task.rerun("ash_ops.example.list_posts", ["--sort", "'-title'", "--limit", "1"]) 91 | end) 92 | 93 | refute output =~ ~r/id: #{post0.id}\n/m 94 | refute output =~ ~r/id: #{post1.id}\n/m 95 | assert output =~ ~r/id: #{post2.id}\n/m 96 | end 97 | 98 | test "it can filter by tenant", %{posts: posts} do 99 | tenant = 100 | posts 101 | |> Enum.random() 102 | |> Map.fetch!(:tenant) 103 | 104 | matching_posts = Enum.filter(posts, &(&1.tenant == tenant)) 105 | non_matching_posts = Enum.filter(posts, &(&1.tenant != tenant)) 106 | 107 | output = 108 | capture_io(fn -> 109 | Mix.Task.rerun("ash_ops.example.list_posts", ["--tenant", tenant]) 110 | end) 111 | 112 | for post <- matching_posts do 113 | assert output =~ ~r/id: #{post.id}\n/m 114 | end 115 | 116 | for post <- non_matching_posts do 117 | refute output =~ ~r/id: #{post.id}\n/m 118 | end 119 | end 120 | 121 | test "when the provided tenant is invalid, it fails" do 122 | output = 123 | capture_io(fn -> 124 | Mix.Task.rerun("ash_ops.example.list_posts", ["--tenant", "Marty McFly"]) 125 | end) 126 | |> String.trim() 127 | 128 | assert output == "" 129 | end 130 | end 131 | -------------------------------------------------------------------------------- /test/ash_ops/task/get_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.GetTest do 6 | @moduledoc false 7 | use ExUnit.Case, async: true 8 | alias Ash.Resource.Info 9 | import ExUnit.CaptureIO 10 | 11 | setup do 12 | ansi_enabled? 
= Application.get_env(:elixir, :ansi_enabled)
    Application.put_env(:elixir, :ansi_enabled, false)

    on_exit(fn ->
      Application.put_env(:elixir, :ansi_enabled, ansi_enabled?)
    end)

    post =
      Example.create_post!(%{
        title: Faker.Food.dish(),
        body: Faker.Food.description(),
        slug: Faker.Internet.slug(),
        tenant: Faker.Lorem.word()
      })

    {:ok, post: post}
  end

  # Fixed test name: the record is looked up by `post.id` — its primary key —
  # not a "public key", and "it's" is the contraction of "it is".
  test "it can be retrieved by its primary key", %{post: post} do
    output =
      capture_io(fn ->
        Mix.Task.rerun("ash_ops.example.get_post", [to_string(post.id)])
      end)

    assert output =~ ~r/id: #{post.id}/m
  end

  test "it displays public attributes by default", %{post: post} do
    output =
      capture_io(fn ->
        Mix.Task.rerun("ash_ops.example.get_post", [to_string(post.id)])
      end)

    public_attributes =
      Example.Post
      |> Info.public_attributes()
      |> Enum.map(& &1.name)

    for field <- public_attributes do
      # Escape the value so regex metacharacters in generated data can't
      # produce false positives/negatives in the match below.
      value =
        post
        |> Map.fetch!(field)
        |> to_string()
        |> Regex.escape()

      assert output =~ ~r/#{field}: #{value}/m
    end
  end

  test "it can use a provided tenant", %{post: post} do
    output =
      capture_io(fn ->
        Mix.Task.rerun("ash_ops.example.get_post", ["--tenant", post.tenant, to_string(post.id)])
      end)

    assert output =~ ~r/id: #{post.id}/m
  end

  test "when the provided tenant is invalid, it fails", %{post: post} do
    output =
      capture_io(:stderr, fn ->
        Mix.Task.rerun("ash_ops.example.get_post", [
          "--tenant",
          "Marty McFly",
          to_string(post.id)
        ])
      end)

    assert output =~ ~r/not found/im
  end

  test "it can format the output as JSON", %{post: post} do
    output =
      capture_io(fn ->
        Mix.Task.rerun("ash_ops.example.get_post", ["--format", "json", to_string(post.id)])
      end)

    assert output =~ ~r/"id":
#{inspect(post.id)},/m 90 | end 91 | 92 | test "it can use an identity to find the record", %{post: post} do 93 | output = 94 | capture_io(fn -> 95 | Mix.Task.rerun("ash_ops.example.get_post", [ 96 | "--identity", 97 | "unique_slug", 98 | to_string(post.slug) 99 | ]) 100 | end) 101 | 102 | assert output =~ ~r/id: #{post.id}/m 103 | end 104 | 105 | test "when an actor is provided and is authorised, it is successful", %{post: post} do 106 | actor = Example.create_actor!(%{is_good: true}) 107 | 108 | output = 109 | capture_io(fn -> 110 | Mix.Task.rerun("ash_ops.example.get_post", [ 111 | "--actor", 112 | "Example.Actor:#{actor.id}", 113 | to_string(post.id) 114 | ]) 115 | end) 116 | 117 | assert output =~ ~r/id: #{post.id}/m 118 | end 119 | 120 | test "when the actor is provided and is not authorised, it fails", %{post: post} do 121 | actor = Example.create_actor!(%{is_good: false}) 122 | 123 | output = 124 | capture_io(:stderr, fn -> 125 | Mix.Task.rerun("ash_ops.example.get_post", [ 126 | "--actor", 127 | "Example.Actor:#{actor.id}", 128 | to_string(post.id) 129 | ]) 130 | end) 131 | 132 | assert output =~ ~r/forbidden/im 133 | end 134 | 135 | test "when the post doesn't exist, it fails" do 136 | output = 137 | capture_io(:stderr, fn -> 138 | Mix.Task.rerun("ash_ops.example.get_post", [to_string(Ash.UUID.generate())]) 139 | end) 140 | 141 | assert output =~ ~r/not found/im 142 | end 143 | 144 | test "calculations can be loaded and returned", %{post: post} do 145 | output = 146 | capture_io(fn -> 147 | Mix.Task.rerun("ash_ops.example.get_post", [ 148 | "--load", 149 | "length", 150 | to_string(post.id) 151 | ]) 152 | end) 153 | 154 | assert output =~ ~r/id: #{post.id}/m 155 | assert output =~ ~r/length: #{byte_size(post.body)}/m 156 | end 157 | 158 | test "relationships can be loaded and returned", %{post: post} do 159 | author = Example.create_actor!(%{is_good: false}) 160 | post = Example.update_post!(post, %{author_id: author.id}) 161 | 162 | output = 163 | 
capture_io(fn -> 164 | Mix.Task.rerun("ash_ops.example.get_post", [ 165 | "--load", 166 | "author.id", 167 | to_string(post.id) 168 | ]) 169 | end) 170 | 171 | assert output =~ ~r/id: #{post.id}/m 172 | assert output =~ ~r/author:\n id: #{author.id}/m 173 | end 174 | end 175 | -------------------------------------------------------------------------------- /lib/ash_ops/task/list.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.List do 6 | @moduledoc """ 7 | Provides the implementation of the `list` mix task. 8 | 9 | This should only ever be called from the mix task itself. 10 | """ 11 | alias Ash.Query 12 | alias AshOps.{QueryLang, Task.ArgSchema} 13 | 14 | import AshOps.Task.Common 15 | 16 | @doc false 17 | def run(argv, task, arg_schema) do 18 | with {:ok, cfg} <- ArgSchema.parse(arg_schema, argv), 19 | {:ok, query} <- read_filter(cfg), 20 | {:ok, query} <- QueryLang.parse(task, query), 21 | {:ok, actor} <- load_actor(cfg[:actor], cfg[:tenant]), 22 | {:ok, records} <- load_records(query, task, Map.put(cfg, :actor, actor)), 23 | {:ok, output} <- serialise_records(records, task.resource, cfg) do 24 | Mix.shell().info(output) 25 | 26 | :ok 27 | else 28 | {:error, reason} -> handle_error({:error, reason}) 29 | end 30 | end 31 | 32 | defp load_records(query, task, cfg) do 33 | opts = 34 | cfg 35 | |> Map.take([:load, :actor, :tenant]) 36 | |> Map.put(:domain, task.domain) 37 | |> Enum.to_list() 38 | 39 | query 40 | |> maybe_add_limit(cfg[:limit]) 41 | |> maybe_add_offset(cfg[:offset]) 42 | |> maybe_add_sort(cfg[:sort]) 43 | |> Ash.read(opts) 44 | end 45 | 46 | defp maybe_add_limit(query, nil), do: query 47 | 48 | defp maybe_add_limit(query, limit) when is_integer(limit) and limit >= 0, 49 | do: Query.limit(query, limit) 50 | 51 | defp maybe_add_offset(query, nil), do: query 52 | 53 | defp 
maybe_add_offset(query, offset) when is_integer(offset) and offset >= 0,
    do: Query.offset(query, offset)

  defp maybe_add_sort(query, nil), do: query

  defp maybe_add_sort(query, sort),
    do: Query.sort_input(query, sort)

  # Reject ambiguous input: a filter can arrive via the switch or STDIN, not both.
  defp read_filter(cfg) when is_binary(cfg.filter) and cfg.filter_stdin == true,
    do: {:error, "Cannot set both `filter` and `filter-stdin` at the same time"}

  defp read_filter(cfg) when cfg.filter_stdin == true do
    case IO.read(:eof) do
      {:error, reason} -> {:error, "Unable to read query from STDIN: #{inspect(reason)}"}
      :eof -> {:error, "No query received on STDIN"}
      filter -> {:ok, filter}
    end
  end

  defp read_filter(cfg) when is_binary(cfg.filter), do: {:ok, cfg.filter}
  defp read_filter(_), do: {:ok, nil}

  @doc false
  defmacro __using__(opts) do
    quote generated: true do
      @task unquote(opts[:task])
      @arg_schema @task
                  |> ArgSchema.default()
                  |> ArgSchema.add_switch(
                    :filter_stdin,
                    :count,
                    type: {:custom, AshOps.Task.Types, :filter_stdin, []},
                    required: false,
                    doc: "Read a JSON or YAML filter from STDIN"
                  )
                  |> ArgSchema.add_switch(
                    :filter,
                    :string,
                    type: {:custom, AshOps.Task.Types, :filter, []},
                    required: false,
                    # FIX: doc previously read "A filter to apply to the filter"
                    # (copy/paste error) — the filter is applied to the query.
                    doc: "A filter to apply to the query"
                  )
                  |> ArgSchema.add_switch(
                    :limit,
                    :integer,
                    type: :non_neg_integer,
                    doc: "An optional limit to put on the number of records returned",
                    required: false
                  )
                  |> ArgSchema.add_switch(
                    :offset,
                    :integer,
                    type: :non_neg_integer,
                    required: false,
                    doc: "An optional number of records to skip"
                  )
                  |> ArgSchema.add_switch(
                    :sort,
                    :string,
                    type: {:custom, AshOps.Task.Types, :sort_input, []},
                    required: false,
                    doc: "An optional sort to apply to the query"
                  )

      @shortdoc "Query for `#{inspect(@task.resource)}`
records using the `#{@task.action.name}` action" 118 | 119 | @moduledoc """ 120 | #{@shortdoc} 121 | 122 | #{if @task.description, do: "#{@task.description}\n\n"} 123 | 124 | #{if @task.action.description, do: """ 125 | ## Action 126 | 127 | #{@task.action.description} 128 | 129 | """} 130 | ## Usage 131 | 132 | Without a query, this task will return all records returned by the 133 | `#{@task.action.name}` read action. You can optionally provide a query 134 | using the filter language documented below to provide additional filters 135 | into the query. 136 | 137 | ## Filters 138 | 139 | #{AshOps.QueryLang.doc()} 140 | 141 | ## Sorting 142 | 143 | You can use [Ash's text based sort format](https://hexdocs.pm/ash/Ash.Query.html#sort/3-format) 144 | to provide a sorting order for the returned records. 145 | 146 | #{ArgSchema.usage(@task, @arg_schema)} 147 | """ 148 | use Mix.Task 149 | 150 | @requirements ["app.start"] 151 | 152 | @impl Mix.Task 153 | def run(args) do 154 | unquote(__MODULE__).run(args, @task, @arg_schema) 155 | end 156 | end 157 | end 158 | end 159 | -------------------------------------------------------------------------------- /test/ash_ops/query_lang_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.QueryLangTest do 6 | @moduledoc false 7 | use ExUnit.Case, async: true 8 | 9 | alias Ash.{Filter, Query} 10 | alias AshOps.{Info, QueryLang} 11 | require Query 12 | 13 | setup do 14 | task = Info.mix_task!(Example, :get_post) 15 | 16 | query = 17 | task.resource 18 | |> Query.new() 19 | |> Query.for_read(task.action.name) 20 | 21 | {:ok, task: task, query: query} 22 | end 23 | 24 | describe "literals" do 25 | test "integer", %{task: task, query: query} do 26 | assert {:ok, parsed} = QueryLang.parse(task, "length == 123") 27 | assert parsed == Query.filter_input(query, length: [eq: 123]) 28 | 
end 29 | 30 | test "negative integer", %{task: task, query: query} do 31 | assert {:ok, parsed} = QueryLang.parse(task, "length == -123") 32 | assert parsed == Query.filter_input(query, length: [eq: -123]) 33 | end 34 | 35 | test "float", %{task: task, query: query} do 36 | assert {:ok, parsed} = QueryLang.parse(task, "length == 123.21") 37 | assert parsed == Query.filter_input(query, length: [eq: 123.21]) 38 | end 39 | 40 | test "negative float", %{task: task, query: query} do 41 | assert {:ok, parsed} = QueryLang.parse(task, "length == -123.21") 42 | assert parsed == Query.filter_input(query, length: [eq: -123.21]) 43 | end 44 | 45 | test "boolean true", %{task: task, query: query} do 46 | assert {:ok, parsed} = QueryLang.parse(task, "long == true") 47 | assert parsed == Query.filter_input(query, long: [eq: true]) 48 | end 49 | 50 | test "boolean false", %{task: task, query: query} do 51 | assert {:ok, parsed} = QueryLang.parse(task, "long == false") 52 | assert parsed == Query.filter_input(query, long: [eq: false]) 53 | end 54 | 55 | test "single tick string", %{task: task, query: query} do 56 | assert {:ok, parsed} = QueryLang.parse(task, "title == 'Marty McFly'") 57 | assert parsed == Query.filter_input(query, title: [eq: "Marty McFly"]) 58 | end 59 | 60 | test "double tick string", %{task: task, query: query} do 61 | assert {:ok, parsed} = QueryLang.parse(task, "title == \"Marty McFly\"") 62 | assert parsed == Query.filter_input(query, title: [eq: "Marty McFly"]) 63 | end 64 | end 65 | 66 | describe "infix" do 67 | test "eq", %{task: task, query: query} do 68 | assert {:ok, parsed} = QueryLang.parse(task, "title == 'Marty McFly'") 69 | assert parsed == Query.filter_input(query, title: [eq: "Marty McFly"]) 70 | end 71 | 72 | test "neq", %{task: task, query: query} do 73 | assert {:ok, parsed} = QueryLang.parse(task, "title != 'Marty McFly'") 74 | assert parsed == Query.filter_input(query, title: [not_equals: "Marty McFly"]) 75 | end 76 | 77 | test "gt", 
%{task: task, query: query} do 78 | assert {:ok, parsed} = QueryLang.parse(task, "length > 3") 79 | assert parsed == Query.filter_input(query, length: [gt: 3]) 80 | end 81 | 82 | test "gte", %{task: task, query: query} do 83 | assert {:ok, parsed} = QueryLang.parse(task, "length >= 3") 84 | assert parsed == Query.filter_input(query, length: [gte: 3]) 85 | end 86 | 87 | test "lt", %{task: task, query: query} do 88 | assert {:ok, parsed} = QueryLang.parse(task, "length < 3") 89 | assert parsed == Query.filter_input(query, length: [lt: 3]) 90 | end 91 | 92 | test "lte", %{task: task, query: query} do 93 | assert {:ok, parsed} = QueryLang.parse(task, "length <= 3") 94 | assert parsed == Query.filter_input(query, length: [lte: 3]) 95 | end 96 | 97 | test "in", %{task: task, query: query} do 98 | assert {:ok, parsed} = QueryLang.parse(task, "title in ['Marty McFly', 'Doc Brown']") 99 | assert parsed == Query.filter_input(query, title: [in: ["Marty McFly", "Doc Brown"]]) 100 | end 101 | 102 | test "times", %{task: task, query: query} do 103 | assert {:ok, parsed} = QueryLang.parse(task, "length * 3") 104 | assert parsed == Query.filter_input(query, length: [times: 3]) 105 | end 106 | 107 | test "div", %{task: task, query: query} do 108 | assert {:ok, parsed} = QueryLang.parse(task, "length / 3") 109 | assert parsed == Query.filter_input(query, length: [div: 3]) 110 | end 111 | 112 | test "plus", %{task: task, query: query} do 113 | assert {:ok, parsed} = QueryLang.parse(task, "length + 3") 114 | assert parsed == Query.filter_input(query, length: [plus: 3]) 115 | end 116 | 117 | test "minus", %{task: task, query: query} do 118 | assert {:ok, parsed} = QueryLang.parse(task, "length - 3") 119 | assert parsed == Query.filter_input(query, length: [minus: 3]) 120 | end 121 | 122 | test "concat", %{task: task, query: query} do 123 | assert {:ok, parsed} = QueryLang.parse(task, "title <> slug") 124 | assert Enum.all?(Filter.list_refs(parsed), & &1.input?) 
125 | assert inspect(parsed) == inspect(Query.filter(query, title <> slug)) 126 | end 127 | end 128 | 129 | describe "infix precedence" do 130 | test "and and or", %{task: task, query: query} do 131 | assert {:ok, parsed} = 132 | QueryLang.parse( 133 | task, 134 | "title == 'Marty McFly' || title == 'Doc Brown' && slug == 'doc-brown'" 135 | ) 136 | 137 | assert Enum.all?(Filter.list_refs(parsed), & &1.input?) 138 | 139 | assert parsed == 140 | Query.filter_input( 141 | query, 142 | or: [ 143 | [title: [eq: "Marty McFly"]], 144 | [title: [eq: "Doc Brown"], slug: "doc-brown"] 145 | ] 146 | ) 147 | end 148 | 149 | test "arithmetic", %{task: task, query: query} do 150 | assert {:ok, parsed} = QueryLang.parse(task, "length * 3 + 2 == 6") 151 | assert inspect(parsed) == inspect(Query.filter(query, length * 3 + 2 == 6)) 152 | end 153 | end 154 | 155 | describe "functions" do 156 | test "fragment", %{task: task, query: query} do 157 | assert {:ok, parsed} = QueryLang.parse(task, "fragment('lower(?)', name) == 'fred'") 158 | assert inspect(parsed) == inspect(Query.filter(query, fragment("lower(?)", name) == "fred")) 159 | end 160 | end 161 | end 162 | -------------------------------------------------------------------------------- /lib/ash_ops/task/common.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.Common do 6 | @moduledoc """ 7 | Common behaviour for all tasks. 8 | """ 9 | alias Ash.{Query, Resource.Info} 10 | require Query 11 | 12 | @doc """ 13 | Given a tuple containing a resource and a filter statement, use it to load the 14 | actor. 
"""
  @spec load_actor(nil | {Ash.Resource.t(), String.t()}, String.t()) ::
          {:ok, nil | Ash.Resource.record()} | {:error, any}
  def load_actor(nil, _tenant), do: {:ok, nil}

  def load_actor({resource, filter}, tenant) do
    # authorize?: false — we are looking the actor itself up, so there is no
    # actor yet to authorise the read against.
    resource
    |> Query.new()
    |> Query.filter_input(filter)
    |> Ash.read_one(authorize?: false, tenant: tenant)
  end

  @doc """
  Format an error for display and exit with a non-zero status.
  """
  @spec handle_error({:error, any}) :: no_return
  def handle_error({:error, reason}) when is_exception(reason) do
    # Exceptions know how to render themselves.
    reason
    |> Exception.message()
    |> Mix.shell().error()

    stop()
  end

  def handle_error({:error, reason}) when is_binary(reason) do
    # Plain-string reasons are printed verbatim.
    reason
    |> Mix.shell().error()

    stop()
  end

  def handle_error({:error, reason}) do
    # Anything else (tuples, maps, …) falls back to `inspect/1`.
    reason
    |> inspect()
    |> Mix.shell().error()

    stop()
  end

  @doc "Return the filter field for the configured identity, or the primary key"
  def identity_or_pk_field(resource, cfg)
      when is_atom(cfg.identity) and not is_nil(cfg.identity) do
    # Only single-key identities are supported; composite identities cannot be
    # expressed as a single positional argument on the command line.
    case Info.identity(resource, cfg.identity) do
      %{keys: [field]} -> {:ok, field}
      _ -> {:error, "Composite identity error"}
    end
  end

  def identity_or_pk_field(resource, _cfg) do
    # No identity configured: fall back to the (single-field) primary key.
    case Info.primary_key(resource) do
      [pk] -> {:ok, pk}
      _ -> {:error, "Primary key error"}
    end
  end

  @doc "Serialise the record for display"
  def serialise_record(record, resource, cfg) do
    data = prepare_record(record, resource, cfg)

    case cfg.format do
      :yaml ->
        # Ymlr prefixes each document with "---\n"; strip it for single-record
        # output so callers can add their own document separators.
        data
        |> Ymlr.document()
        |> case do
          {:ok, yaml} -> {:ok, String.replace_leading(yaml, "---\n", "")}
          {:error, reason} -> {:error, reason}
        end

      :json ->
        data
        |> Jason.encode(pretty: true)
    end
  end

  @doc "Serialise a list of records for display"
  def
serialise_records(records, resource, cfg) when cfg.format == :yaml do 91 | with {:ok, outputs} <- 92 | Enum.reduce_while(records, {:ok, []}, fn record, {:ok, outputs} -> 93 | case serialise_record(record, resource, cfg) do 94 | {:ok, output} -> {:cont, {:ok, [output | outputs]}} 95 | {:error, reason} -> {:halt, {:error, reason}} 96 | end 97 | end) do 98 | outputs = 99 | outputs 100 | |> Enum.reverse() 101 | |> Enum.join("---\n") 102 | 103 | {:ok, outputs} 104 | end 105 | end 106 | 107 | def serialise_records(records, resource, cfg) when cfg.format == :json do 108 | records 109 | |> Enum.map(&prepare_record(&1, resource, cfg)) 110 | |> Jason.encode(pretty: true) 111 | end 112 | 113 | def serialise_records(records, resource, cfg), 114 | do: serialise_records(records, resource, Map.put(cfg, :format, :yaml)) 115 | 116 | # Filter and format record fields, but do not encode 117 | defp prepare_record(record, resource, cfg) do 118 | record 119 | |> filter_record(resource, cfg) 120 | |> format_record(resource, cfg) 121 | end 122 | 123 | # Apply formatting to each field of a filtered record 124 | defp format_record(record, resource, cfg) do 125 | Map.new(record, fn 126 | {key, value} -> 127 | field_info = resource |> Info.field(key) 128 | {key, format_value(value, field_info, cfg)} 129 | end) 130 | end 131 | 132 | @doc """ 133 | Format a value given the field type info and formatting configuration options 134 | """ 135 | def format_value(value, field_info, cfg) 136 | 137 | # NOTE: In future, dispatch on the type, not the value to support new types 138 | def format_value(%Ash.CiString{} = value, field_info, cfg) do 139 | format_value(to_string(value), field_info, cfg) 140 | end 141 | 142 | def format_value(nil, _field, %{format: :yaml}) do 143 | "nil" 144 | end 145 | 146 | def format_value(value, attribute = %{type: {:array, type}}, cfg) when is_list(value) do 147 | inner_type = type 148 | inner_constraints = attribute.constraints[:items] || [] 149 | inner_attribute = 
%{attribute | type: inner_type, constraints: inner_constraints} 150 | Enum.map(value, &format_value(&1, inner_attribute, cfg)) 151 | end 152 | 153 | # HasMany or ManyToMany relationships 154 | def format_value(value, attribute = %{cardinality: :many}, cfg) when is_list(value) do 155 | Enum.map(value, &format_value(&1, attribute, cfg)) 156 | end 157 | 158 | def format_value(%struct{} = value, field_info, cfg) do 159 | if Info.resource?(struct) do 160 | load = cfg[:load][field_info.name] || [] 161 | cfg = Map.put(cfg, :load, load) 162 | prepare_record(value, struct, cfg) 163 | else 164 | format_fallback_value(value, cfg) 165 | end 166 | end 167 | 168 | def format_value(value, _field_info, cfg) do 169 | format_fallback_value(value, cfg) 170 | end 171 | 172 | defp format_fallback_value(value, %{format: :json}) do 173 | if Jason.Encoder.impl_for(value) do 174 | value 175 | else 176 | "" 177 | end 178 | end 179 | 180 | defp format_fallback_value(value, %{format: :yaml}) do 181 | if Ymlr.Encoder.impl_for(value) do 182 | value 183 | else 184 | "" 185 | end 186 | end 187 | 188 | # Convert a record to a plain map, excluding private fields 189 | defp filter_record(record, _resource, cfg) do 190 | record 191 | |> Info.public_fields() 192 | |> Enum.map(& &1.name) 193 | |> Enum.concat(cfg[:load] || []) 194 | |> do_filter_record(record) 195 | end 196 | 197 | defp do_filter_record(fields, record, result \\ %{}) 198 | defp do_filter_record([], _record, result), do: result 199 | 200 | defp do_filter_record([field | fields], record, result) when is_atom(field) do 201 | case Map.fetch!(record, field) do 202 | not_loaded when is_struct(not_loaded, Ash.NotLoaded) -> 203 | do_filter_record(fields, record, result) 204 | 205 | value -> 206 | do_filter_record(fields, record, Map.put(result, field, value)) 207 | end 208 | end 209 | 210 | defp do_filter_record([{field, children} | fields], record, result) when is_list(children) do 211 | value = do_filter_record(children, Map.fetch!(record, 
field))
    do_filter_record(fields, record, Map.put(result, field, value))
  end

  # In tests we must not kill the VM, or the test run itself would die.
  if Mix.env() == :test do
    defp stop, do: :ok
  else
    defp stop, do: System.stop(1)
  end
end
-------------------------------------------------------------------------------- /lib/ash_ops/transformer/prepare_task.ex: --------------------------------------------------------------------------------
# SPDX-FileCopyrightText: 2025 ash_ops contributors
#
# SPDX-License-Identifier: MIT

defmodule AshOps.Transformer.PrepareTask do
  @moduledoc """
  A Spark DSL transformer for all `mix_tasks` entities.
  """
  use Spark.Dsl.Transformer
  import Spark.Dsl.Transformer

  alias Ash.Domain.Info, as: ADI
  alias Ash.Resource.Info, as: ARI
  alias AshOps.Info, as: AOI
  alias Spark.Error.DslError

  @doc false
  @impl true
  def transform(dsl) do
    # Validate and complete every configured task, halting on the first error.
    dsl
    |> AOI.mix_tasks()
    |> Enum.reduce_while({:ok, dsl}, fn task, {:ok, dsl} ->
      case transform_entity(task, dsl) do
        {:ok, dsl} -> {:cont, {:ok, dsl}}
        {:error, reason} -> {:halt, {:error, reason}}
      end
    end)
  end

  defp transform_entity(task, dsl) do
    with :ok <- validate_resource(task, dsl),
         {:ok, action} <- validate_action(task, dsl),
         {:ok, task} <- validate_read_action(task, dsl),
         {:ok, task} <- set_domain(%{task | action: action}, dsl),
         {:ok, task} <- set_prefix(task, dsl),
         {:ok, task} <- set_task_name(task),
         {:ok, dsl} <- gen_task(task, dsl) do
      {:ok, replace_entity(dsl, [:mix_tasks], task)}
    end
  end

  # Ensure the task's resource actually belongs to this domain.
  defp validate_resource(task, dsl) do
    dsl
    |> ADI.resource(task.resource)
    |> case do
      {:ok, _resource} ->
        :ok

      {:error, _reason} ->
        module = get_persisted(dsl, :module)

        {:error,
         DslError.exception(
           module: module,
           # FIX: this path was hard-coded as `:get`, but this clause runs for
           # every task type — use `task.type` to match the other validators.
           path: [:mix_tasks, task.type, task.name, :resource],
           message: """
           The
resource `#{inspect(task.resource)}` is not a member of the `#{inspect(module)}` domain. 58 | """ 59 | )} 60 | end 61 | end 62 | 63 | defp validate_action(task, dsl) do 64 | task.resource 65 | |> ARI.action(task.action) 66 | |> case do 67 | action when action.type == :read and task.type in [:get, :list] -> 68 | {:ok, action} 69 | 70 | action when action.type == task.type -> 71 | {:ok, action} 72 | 73 | nil -> 74 | {:error, 75 | DslError.exception( 76 | module: get_persisted(dsl, :module), 77 | path: [:mix_tasks, task.type, task.name, :action], 78 | message: """ 79 | The resource `#{inspect(task.resource)}` has no action named `#{inspect(task.action)}`. 80 | """ 81 | )} 82 | 83 | action when task.type in [:get, :list] -> 84 | {:error, 85 | DslError.exception( 86 | module: get_persisted(dsl, :module), 87 | path: [:mix_tasks, task.type, task.name, :action], 88 | message: """ 89 | Expected the action `#{task.action}` on the `#{inspect(task.resource)}` resource to be a #{task.type}, but it is a #{action.type}. 90 | """ 91 | )} 92 | 93 | action -> 94 | {:error, 95 | DslError.exception( 96 | module: get_persisted(dsl, :module), 97 | path: [:mix_tasks, task.type, task.name, :action], 98 | message: """ 99 | Expected the action `#{task.action}` on the `#{inspect(task.resource)}` resource to be a #{task.type}, but it is a #{action.type}. 100 | """ 101 | )} 102 | end 103 | end 104 | 105 | defp validate_read_action(task, dsl) when is_nil(task.read_action) do 106 | task.resource 107 | |> ARI.actions() 108 | |> Enum.find(&(&1.type == :read && &1.primary? == true)) 109 | |> case do 110 | nil -> 111 | {:error, 112 | DslError.exception( 113 | module: get_persisted(dsl, :module), 114 | path: [:mix_tasks, task.type, task.name, :read_action], 115 | message: """ 116 | Task requires a read action, but none was provided and no primary read is set. 
117 | """ 118 | )} 119 | 120 | action -> 121 | {:ok, %{task | read_action: action}} 122 | end 123 | end 124 | 125 | defp validate_read_action(task, dsl) when is_atom(task.read_action) do 126 | task.resource 127 | |> ARI.action(task.read_action) 128 | |> case do 129 | %{type: :read} = action -> 130 | {:ok, %{task | read_action: action}} 131 | 132 | nil -> 133 | {:error, 134 | DslError.exception( 135 | module: get_persisted(dsl, :module), 136 | path: [:mix_tasks, task.type, task.name, :read_action], 137 | message: """ 138 | There is no read action named `#{task.read_action}` on the resource. 139 | """ 140 | )} 141 | 142 | action -> 143 | {:error, 144 | DslError.exception( 145 | module: get_persisted(dsl, :module), 146 | path: [:mix_tasks, task.type, task.name, :read_action], 147 | message: """ 148 | Expected the action `#{task.read_action}` to be a read. It is a `#{action.type}` 149 | """ 150 | )} 151 | end 152 | end 153 | 154 | defp validate_read_action(task, _dsl), do: {:ok, task} 155 | 156 | defp set_domain(task, dsl) do 157 | {:ok, %{task | domain: get_persisted(dsl, :module)}} 158 | end 159 | 160 | defp set_prefix(task, dsl) when is_nil(task.prefix) do 161 | case get_persisted(dsl, :otp_app) do 162 | nil -> 163 | {:error, 164 | DslError.exception( 165 | module: get_persisted(dsl, :module), 166 | path: [:mix_tasks, task.type, task.name, :prefix], 167 | message: """ 168 | Unable to set default mix task prefix because the `:otp_app` option is missing from the domain. 169 | 170 | Either set the `prefix` option directly, or add `otp_app: :my_app` to the `use Ash.Domain` statement in this module. 
171 | """ 172 | )} 173 | 174 | app when is_atom(app) -> 175 | {:ok, %{task | prefix: app}} 176 | end 177 | end 178 | 179 | defp set_prefix(task, _dsl) when is_atom(task.prefix), do: {:ok, task} 180 | 181 | defp set_task_name(task) when is_binary(task.task_name), do: {:ok, task} 182 | 183 | defp set_task_name(task) do 184 | domain = 185 | task.domain 186 | |> Module.split() 187 | |> List.last() 188 | |> Macro.underscore() 189 | 190 | {:ok, %{task | task_name: "#{task.prefix}.#{domain}.#{task.name}"}} 191 | end 192 | 193 | defp gen_task(task, dsl) do 194 | mix_task_module_name = 195 | task.task_name 196 | |> String.replace(".", "/") 197 | |> Macro.camelize() 198 | |> then(&Module.concat("Mix.Tasks", &1)) 199 | 200 | ash_ops_task_module_name = 201 | task.type 202 | |> to_string() 203 | |> Macro.camelize() 204 | |> then(&Module.concat("AshOps.Task", &1)) 205 | 206 | dsl = 207 | dsl 208 | |> eval( 209 | [ 210 | ash_ops_task_module_name: ash_ops_task_module_name, 211 | task: task, 212 | mix_task_module_name: mix_task_module_name 213 | ], 214 | quote do 215 | defmodule unquote(mix_task_module_name) do 216 | use unquote(ash_ops_task_module_name), task: unquote(Macro.escape(task)) 217 | end 218 | end 219 | ) 220 | 221 | {:ok, dsl} 222 | end 223 | end 224 | -------------------------------------------------------------------------------- /lib/ash_ops/query_lang.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.QueryLang do 6 | @langdoc """ 7 | The Ash Ops query language is very similar to the 8 | [Ash expression syntax](https://hexdocs.pm/ash/expressions.html) 9 | while not actually being Elixir. 10 | 11 | ### Literals 12 | 13 | - `true` and `false` boolean literals. 14 | - Integer literals, eg `123`, `-123` and `0`. 15 | - Float literals, of the form `1.23` and `-1.23` (ie not scientific notation). 
16 | - String literals of delimited with both single ticks (`'`) and double ticks 17 | (`"`) are interpreted as Elixir binaries. 18 | - Attribute path literals are entered directly, separated by `.` as necessary. 19 | 20 | ### Infix operators 21 | 22 | The following infix operators (and their aliases) are available: 23 | 24 | - `&&` and `and`, 25 | - `||` and `or`, 26 | - `*` and `times`, 27 | - `/` and `div`, 28 | - `+` and `plus`, 29 | - `-` and `minus`, 30 | - `<>` and `concat`, 31 | - `in`, 32 | - `>` and `gt`, 33 | - `>=` and `gte`, 34 | - `<` and `lt`, 35 | - `<=` and `lte`, 36 | - `==` and `eq`, 37 | - `!=` and `not_eq`. 38 | 39 | ### Function calls 40 | 41 | Function calls of the form `function(arg, arg)` format are allowed. The 42 | functions available are highly dependant on the extensions and data layer 43 | being used by the resource being queried. 44 | """ 45 | 46 | @moduledoc """ 47 | The query language for simple queries entered on the command-line. 48 | 49 | This query language is not capable of expressing the full breadth of the Ash 50 | expression language. It is designed to provide a simple subset of filters 51 | available to users of AshOps. If you need something more advanced then it's 52 | probably a good idea to make a special read query filtered in the way you 53 | require and expose that as a mix task instead. 54 | 55 | ## Syntax 56 | 57 | #{@langdoc} 58 | """ 59 | alias Ash.{Query, Query.BooleanExpression, Query.Call, Query.Operator, Query.Ref} 60 | require Query 61 | 62 | @doc """ 63 | Parse a query and return a compiled `Ash.Query` struct. 
64 | """ 65 | @spec parse(AshOps.entity(), nil | String.t()) :: {:ok, Ash.Query.t()} | {:error, any} 66 | def parse(task, nil), do: {:ok, base_query(task)} 67 | 68 | def parse(task, query) do 69 | with {:ok, query} <- do_parse(query) do 70 | query 71 | |> precedence() 72 | |> compile(task) 73 | end 74 | end 75 | 76 | @doc false 77 | def doc, do: @langdoc 78 | 79 | defp base_query(task) do 80 | task.resource 81 | |> Query.new() 82 | |> Query.for_read(task.action.name) 83 | end 84 | 85 | defp do_parse(query) do 86 | query 87 | |> :ash_ops_query.parse() 88 | |> case do 89 | {:fail, {:expected, _, {{:line, line}, {:column, column}}}} -> 90 | {:error, "Unable to parse query at #{line}:#{column}"} 91 | 92 | {:fail, _} -> 93 | {:error, "Unable to parse query."} 94 | 95 | {_parsed, unparsed} -> 96 | {:error, "Unable to parse query: unexpected input `#{unparsed}`."} 97 | 98 | {_parsed, unparsed, {{:line, line}, {:column, column}}} -> 99 | {:error, "Unable to parse query: unexpected input `#{unparsed}` at #{line}:#{column}"} 100 | 101 | parsed when is_list(parsed) -> 102 | {:ok, parsed} 103 | end 104 | end 105 | 106 | defp precedence([lhs | rest]), do: do_prec(lhs, rest, 0) 107 | 108 | defp do_prec(lhs, [], _min_prec), do: lhs 109 | 110 | defp do_prec(lhs, [{:op, op, _, prec0}, rhs, {:op, _, assoc1, prec1} = op1 | rest], min_prec) 111 | when prec0 >= min_prec and 112 | ((assoc1 == :left and prec1 > prec0) or (assoc1 == :right and prec1 >= prec0)) do 113 | next_min_prec = if prec1 > prec0, do: prec0 + 1, else: prec0 114 | rhs = do_prec(rhs, [op1 | rest], next_min_prec) 115 | do_prec({:op, op, lhs, rhs}, [], min_prec) 116 | end 117 | 118 | defp do_prec(lhs, [{:op, op, _, _}, rhs | rest], min_prec) do 119 | do_prec({:op, op, lhs, rhs}, rest, min_prec) 120 | end 121 | 122 | defp compile(query, task) do 123 | with {:ok, filter} <- to_filter(query) do 124 | query = 125 | task 126 | |> base_query() 127 | |> Query.do_filter(filter) 128 | 129 | {:ok, query} 130 | end 131 | end 132 | 
  # Converts the parsed query AST (after re-association by `precedence/1`)
  # into an Ash filter expression.
  #
  # `&&` and `||` nodes become `Ash.Query.BooleanExpression` structs, any
  # other infix operator is resolved to an operator module via
  # `op_to_module/1`, and leaf tokens become literals, `Ref`s or `Call`s.
  defp to_filter({:op, :&&, lhs, rhs}) do
    with {:ok, lhs} <- to_filter(lhs),
         {:ok, rhs} <- to_filter(rhs) do
      {:ok, BooleanExpression.new(:and, lhs, rhs)}
    end
  end

  defp to_filter({:op, :||, lhs, rhs}) do
    with {:ok, lhs} <- to_filter(lhs),
         {:ok, rhs} <- to_filter(rhs) do
      {:ok, BooleanExpression.new(:or, lhs, rhs)}
    end
  end

  defp to_filter({:op, op, lhs, rhs}) do
    with {:ok, module} <- op_to_module(op),
         {:ok, lhs} <- to_filter(lhs),
         {:ok, rhs} <- to_filter(rhs) do
      # `Operator.new/3` already returns an ok/error tuple, so its result is
      # passed through unwrapped.
      Operator.new(module, lhs, rhs)
    end
  end

  # An attribute path such as `author.name`. The final segment becomes the
  # referenced attribute and the leading segments the relationship path.
  defp to_filter({:path, segments}) do
    with {:ok, segments} <-
           segments
           |> map_while(fn
             {:ident, segment} -> {:ok, segment}
             other -> {:error, "Unexpected path segment: `#{inspect(other)}`"}
           end) do
      segments
      |> Enum.reverse()
      |> case do
        [ident] ->
          # NOTE(review): `input?: true` appears to mark the reference as
          # user-supplied input - confirm against the Ash.Query.Ref docs.
          {:ok, %Ref{attribute: ident, relationship_path: [], input?: true}}

        [ident | rest] ->
          # `rest` is in reverse order because of the `Enum.reverse/1` above,
          # so reverse it again to restore the original path order.
          {:ok, %Ref{attribute: ident, relationship_path: Enum.reverse(rest), input?: true}}
      end
    end
  end

  # A function call, eg `contains(name, 'fred')`. Which functions actually
  # exist is decided later by the resource's data layer and extensions.
  defp to_filter({:function, {:ident, name}, arguments}) do
    with {:ok, arguments} <- map_while(arguments, &to_filter/1) do
      {:ok, %Call{name: name, args: arguments}}
    end
  end

  # Array literals convert each element and yield a plain list.
  defp to_filter({:array, array}) do
    map_while(array, &to_filter/1)
  end

  # Scalar literals pass straight through as their Elixir values.
  defp to_filter({:string, value}) when is_binary(value), do: {:ok, value}
  defp to_filter({:float, float}) when is_float(float), do: {:ok, float}
  defp to_filter({:boolean, boolean}) when is_boolean(boolean), do: {:ok, boolean}
  defp to_filter({:integer, int}) when is_integer(int), do: {:ok, int}
  defp to_filter(token), do: {:error, "Unexpected token `#{inspect(token)}`"}

  # Maps infix operator atoms onto Ash operator modules; further clauses for
  # the remaining operators follow.
  defp op_to_module(:*), do: {:ok, Operator.Basic.Times}
  defp op_to_module(:/), do: {:ok, Operator.Basic.Div}
192 | defp op_to_module(:+), do: {:ok, Operator.Basic.Plus} 193 | defp op_to_module(:-), do: {:ok, Operator.Basic.Minus} 194 | defp op_to_module(:<>), do: {:ok, Operator.Basic.Concat} 195 | defp op_to_module(:in), do: {:ok, Operator.In} 196 | defp op_to_module(:>), do: {:ok, Operator.GreaterThan} 197 | defp op_to_module(:>=), do: {:ok, Operator.GreaterThanOrEqual} 198 | defp op_to_module(:<), do: {:ok, Operator.LessThan} 199 | defp op_to_module(:<=), do: {:ok, Operator.LessThanOrEqual} 200 | defp op_to_module(:==), do: {:ok, Operator.Eq} 201 | defp op_to_module(:!=), do: {:ok, Operator.NotEq} 202 | defp op_to_module(op), do: {:error, "Unknown infix operator `#{inspect(op)}`"} 203 | 204 | defp map_while(input, mapper) do 205 | with {:ok, result} <- 206 | Enum.reduce_while(input, {:ok, []}, fn element, {:ok, result} -> 207 | case mapper.(element) do 208 | {:ok, element} -> {:cont, {:ok, [element | result]}} 209 | {:error, reason} -> {:halt, {:error, reason}} 210 | end 211 | end) do 212 | {:ok, Enum.reverse(result)} 213 | end 214 | end 215 | end 216 | -------------------------------------------------------------------------------- /lib/ash_ops/task/types.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.Types do 6 | @moduledoc """ 7 | Custom `Spark.Options` types for casting and validating CLI arguments. 
8 | """ 9 | alias Ash.Domain.Info, as: ADI 10 | alias Ash.Resource.Info, as: ARI 11 | alias Spark.Dsl.Extension 12 | 13 | @type task :: AshOps.entity() 14 | 15 | @doc "Custom option type for loading an actor" 16 | @spec actor(any, task) :: {:ok, Ash.Resource.record()} | {:error, any} 17 | def actor(input, task) when is_binary(input) do 18 | input 19 | |> String.split(":") 20 | |> case do 21 | [resource, id] -> parse_actor(task, resource, id) 22 | _ -> {:error, "Invalid actor"} 23 | end 24 | end 25 | 26 | def actor(_, _), do: {:error, "Invalid actor"} 27 | 28 | @doc "Custom option type for an identity" 29 | @spec identity(any, task) :: {:ok, atom} | {:error, any} 30 | def identity(identity, task) when is_binary(identity) do 31 | task.resource 32 | |> ARI.identities() 33 | |> Enum.reduce_while( 34 | {:error, "Resource `#{inspect(task.resource)}` has no identity named `#{identity}`"}, 35 | fn ident, error -> 36 | if to_string(ident.name) == identity do 37 | # credo:disable-for-next-line Credo.Check.Refactor.Nesting 38 | case ident.keys do 39 | [_] -> 40 | {:halt, {:ok, ident.name}} 41 | 42 | _ -> 43 | {:halt, 44 | {:error, 45 | "Identity named `#{identity}` on resource `#{inspect(task.resource)}` contains multiple fields"}} 46 | end 47 | 48 | {:halt, {:ok, ident.name}} 49 | else 50 | {:cont, error} 51 | end 52 | end 53 | ) 54 | end 55 | 56 | def identity(nil, task), do: find_resource_pk(task.resource) 57 | 58 | def identity(identity, _task), do: {:error, "Invalid identity `#{inspect(identity)}`"} 59 | 60 | @doc "Custom option type for positional arguments" 61 | @spec positional_arguments(any, task, Keyword.t(String.t()), Keyword.t(String.t())) :: 62 | {:ok, [any]} | {:error, any} 63 | def positional_arguments(input, task, before_args, after_args) when is_list(input) do 64 | expected_args = 65 | before_args 66 | |> Keyword.keys() 67 | |> Enum.concat(task.arguments) 68 | |> Enum.concat(Keyword.keys(after_args)) 69 | 70 | expected_arg_count = length(expected_args) 71 | 
72 | input_length = length(input) 73 | 74 | if input_length == expected_arg_count do 75 | args = 76 | expected_args 77 | |> Enum.zip(input) 78 | |> Map.new() 79 | 80 | {:ok, args} 81 | else 82 | {:error, 83 | "Expected #{expected_arg_count} positional arguments, but received #{input_length}"} 84 | end 85 | end 86 | 87 | def positional_arguments(input, _task, _before_args, _after_args), 88 | do: {:error, "Invalid arguments `#{inspect(input)}`"} 89 | 90 | @doc "Custom option type for atom" 91 | @spec atom(String.t(), [atom]) :: {:ok, atom} | {:error, any} 92 | def atom(input, options) do 93 | input = String.downcase(input) 94 | 95 | Enum.reduce_while(options, {:error, "Invalid input `#{input}`"}, fn option, error -> 96 | if input == to_string(option) do 97 | {:halt, {:ok, option}} 98 | else 99 | {:cont, error} 100 | end 101 | end) 102 | end 103 | 104 | @doc "Custom option type for load" 105 | @spec load(any, task) :: {:ok, [atom]} | {:error, any} 106 | def load(input, task) when is_binary(input) do 107 | input 108 | |> String.split(~r/\s*,\s*/, trim: true) 109 | |> Enum.map(&String.split(&1, ~r/\s*\.\s*/, trim: true)) 110 | |> build_nested_loads() 111 | |> validate_nested_loads(task.resource) 112 | end 113 | 114 | def load(input, _task), do: {:error, "Invalid load `#{inspect(input)}`"} 115 | 116 | @doc "Custom option type for query filters" 117 | @spec filter(any) :: {:ok, String.t()} | {:error, any} 118 | def filter(input) when is_binary(input), do: {:ok, input} 119 | 120 | def filter(input), do: {:error, "Invalid filter `#{inspect(input)}`"} 121 | 122 | @doc "Custom option type for filter-stdin" 123 | @spec filter_stdin(any) :: {:ok, boolean} | {:error, any} 124 | def filter_stdin(input), do: {:ok, input > 0} 125 | 126 | @doc "Parse a sort input" 127 | @spec sort_input(any) :: {:ok, String.t()} | {:error, any} 128 | def sort_input(input) when is_binary(input) do 129 | input = 130 | input 131 | |> String.trim("'") 132 | |> String.trim("\"") 133 | 134 | {:ok, input} 
135 | end 136 | 137 | defp build_nested_loads(loads, result \\ {[], []}) 138 | defp build_nested_loads([], {l_opts, kw_opts}), do: Enum.concat(l_opts, kw_opts) 139 | 140 | defp build_nested_loads([[field] | loads], {l_opts, kw_opts}) when is_binary(field) do 141 | build_nested_loads(loads, {[field | l_opts], kw_opts}) 142 | end 143 | 144 | defp build_nested_loads([[field | fields] | loads], {l_opts, kw_opts}) when is_binary(field) do 145 | nested = build_nested_loads([fields]) 146 | kw_opts = [{field, nested} | kw_opts] 147 | build_nested_loads(loads, {l_opts, kw_opts}) 148 | end 149 | 150 | defp validate_nested_loads(loads, resource, result \\ {[], []}) 151 | 152 | defp validate_nested_loads([], _resource, {l_opts, kw_opts}), 153 | do: {:ok, Enum.concat(l_opts, kw_opts)} 154 | 155 | defp validate_nested_loads([field | loads], resource, {l_opts, kw_opts}) 156 | when is_binary(field) do 157 | case ARI.public_field(resource, field) do 158 | nil -> 159 | {:error, 160 | "Field `#{field}` does not exist on the `#{inspect(resource)}` resource or is not public"} 161 | 162 | field -> 163 | validate_nested_loads(loads, resource, {[field.name | l_opts], kw_opts}) 164 | end 165 | end 166 | 167 | defp validate_nested_loads([{field, fields} | loads], resource, {l_opts, kw_opts}) do 168 | case ARI.public_relationship(resource, field) do 169 | nil -> 170 | {:error, 171 | "Relationship `#{field}` does not exist on the `#{inspect(resource)}` resource or is not public"} 172 | 173 | rel -> 174 | with {:ok, nested} <- validate_nested_loads(fields, rel.destination) do 175 | kw_opts = Keyword.put(kw_opts, rel.name, nested) 176 | validate_nested_loads(loads, resource, {l_opts, kw_opts}) 177 | end 178 | end 179 | end 180 | 181 | defp parse_actor(task, resource, id) do 182 | with {:ok, otp_app} <- otp_app(task), 183 | {:ok, resource} <- find_resource(otp_app, resource), 184 | {:ok, pk} <- find_resource_pk(resource) do 185 | {:ok, {resource, %{pk => %{"eq" => id}}}} 186 | end 187 | end 188 
| 189 | defp find_resource(otp_app, resource) when is_binary(resource) do 190 | otp_app 191 | |> Application.get_env(:ash_domains, []) 192 | |> Stream.flat_map(&ADI.resources/1) 193 | |> Enum.reduce_while( 194 | {:error, "Unable to find a resource named `#{inspect(resource)}`"}, 195 | fn found, error -> 196 | if inspect(found) == resource do 197 | {:halt, {:ok, found}} 198 | else 199 | {:cont, error} 200 | end 201 | end 202 | ) 203 | end 204 | 205 | defp find_resource_pk(resource) do 206 | case ARI.primary_key(resource) do 207 | [] -> {:error, "The resource `#{inspect(resource)}` has no primary key configured."} 208 | [pk] -> {:ok, pk} 209 | _ -> {:error, "The resource `#{inspect(resource)}` has a composite primary key."} 210 | end 211 | end 212 | 213 | defp otp_app(task) do 214 | case Extension.get_persisted(task.domain, :otp_app) do 215 | nil -> {:error, "otp_app option is missing from `#{inspect(task.domain)}` domain"} 216 | domain -> {:ok, domain} 217 | end 218 | end 219 | end 220 | -------------------------------------------------------------------------------- /lib/ash_ops/task/create.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.Create do 6 | @moduledoc """ 7 | Provides the implementation of the `create` mix task. 8 | 9 | This should only ever be called from the mix task itself. 
10 | """ 11 | 12 | alias Ash.{Changeset, Resource.Info} 13 | alias AshOps.Task.ArgSchema 14 | import AshOps.Task.Common 15 | 16 | @doc false 17 | def run(argv, task, arg_schema) do 18 | with {:ok, cfg} <- ArgSchema.parse(arg_schema, argv), 19 | {:ok, changeset} <- read_input(task, cfg), 20 | {:ok, record} <- create_record(changeset, task, cfg), 21 | {:ok, output} <- serialise_record(record, task.resource, cfg) do 22 | Mix.shell().info(output) 23 | :ok 24 | else 25 | {:error, reason} -> handle_error({:error, reason}) 26 | end 27 | end 28 | 29 | defp create_record(changeset, task, cfg) do 30 | opts = 31 | cfg 32 | |> Map.take([:load, :actor, :tenant]) 33 | |> Map.put(:domain, task.domain) 34 | |> Enum.to_list() 35 | 36 | changeset 37 | |> Changeset.for_create(task.action) 38 | |> Ash.create(opts) 39 | end 40 | 41 | defp read_input(task, cfg) when cfg.input == :interactive do 42 | argument_names = 43 | task.action.arguments 44 | |> Enum.filter(& &1.public?) 45 | |> MapSet.new(& &1.name) 46 | 47 | inputs = 48 | task.action.accept 49 | |> MapSet.new() 50 | |> MapSet.difference(MapSet.new(task.action.reject)) 51 | |> MapSet.union(argument_names) 52 | 53 | changeset = 54 | task.resource 55 | |> Changeset.new() 56 | 57 | Mix.shell().info( 58 | IO.ANSI.format([ 59 | "Creating new ", 60 | :cyan, 61 | inspect(task.resource), 62 | :reset, 63 | " using the ", 64 | :cyan, 65 | to_string(task.action.name), 66 | :reset, 67 | " action:\n" 68 | ]) 69 | ) 70 | 71 | prompt_for_inputs(inputs, task, changeset) 72 | end 73 | 74 | defp read_input(task, cfg) when cfg.input == :yaml do 75 | with {:ok, input} <- read_stdin() do 76 | case YamlElixir.read_from_string(input) do 77 | {:ok, map} when is_map(map) -> 78 | changeset = 79 | task.resource 80 | |> Changeset.new() 81 | |> Changeset.for_create(task.action.name, map) 82 | 83 | {:ok, changeset} 84 | 85 | {:ok, _other} -> 86 | {:error, "YAML input must be a map"} 87 | 88 | {:error, reason} -> 89 | {:error, reason} 90 | end 91 | end 92 | end 
93 | 94 | defp read_input(task, cfg) when cfg.input == :json do 95 | with {:ok, input} <- read_stdin() do 96 | case Jason.decode(input) do 97 | {:ok, map} when is_map(map) -> 98 | changeset = 99 | task.resource 100 | |> Changeset.new() 101 | |> Changeset.for_create(task.action, map) 102 | 103 | {:ok, changeset} 104 | 105 | {:ok, _other} -> 106 | {:error, "JSON input must be a map"} 107 | 108 | {:error, reason} -> 109 | {:error, reason} 110 | end 111 | end 112 | end 113 | 114 | defp read_stdin do 115 | case IO.read(:eof) do 116 | {:error, reason} -> {:error, "Unable to read input from STDIN: #{inspect(reason)}"} 117 | :eof -> {:error, "No input received on STDIN"} 118 | input -> {:ok, input} 119 | end 120 | end 121 | 122 | defp prompt_for_inputs(inputs, task, changeset) do 123 | arguments = 124 | task.action.arguments 125 | |> Enum.filter(& &1.public?) 126 | |> Map.new(&{&1.name, &1}) 127 | 128 | Enum.reduce_while(inputs, {:ok, changeset}, fn input_name, {:ok, changeset} -> 129 | {input_type, entity} = 130 | if is_map_key(arguments, input_name) do 131 | {:argument, Map.fetch!(arguments, input_name)} 132 | else 133 | attribute = Info.attribute(task.resource, input_name) 134 | {:attribute, attribute} 135 | end 136 | 137 | case prompt_for_input( 138 | input_name, 139 | input_type, 140 | task, 141 | changeset, 142 | entity 143 | ) do 144 | {:ok, changeset} -> {:cont, {:ok, changeset}} 145 | {:error, reason} -> {:halt, {:error, reason}} 146 | end 147 | end) 148 | end 149 | 150 | defp prompt_for_input(input_name, input_type, task, changeset, entity, retries \\ 3) 151 | 152 | defp prompt_for_input(input_name, input_type, task, changeset, entity, 0) do 153 | prompt = 154 | IO.ANSI.format([ 155 | "Looks like you're having trouble entering the ", 156 | :cyan, 157 | to_string(input_name), 158 | :reset, 159 | " #{input_type}. Would you like to keep trying?" 
160 | ]) 161 | |> IO.iodata_to_binary() 162 | 163 | if Mix.shell().yes?(prompt) do 164 | prompt_for_input(input_name, input_type, task, changeset, entity, 3) 165 | else 166 | {:error, "Aborted while entering `#{input_name}` #{input_type}."} 167 | end 168 | end 169 | 170 | defp prompt_for_input(input_name, input_type, task, input_changeset, entity, retries) do 171 | prompt = 172 | IO.ANSI.format([ 173 | "[", 174 | :cyan, 175 | to_string(input_name), 176 | :reset, 177 | "](", 178 | describe_type(entity.type, entity.constraints), 179 | "):" 180 | ]) 181 | |> IO.iodata_to_binary() 182 | 183 | input = 184 | prompt 185 | |> Mix.shell().prompt() 186 | |> String.trim() 187 | 188 | changeset = 189 | case input_type do 190 | :attribute -> Changeset.change_attribute(input_changeset, input_name, input) 191 | :argument -> Changeset.set_argument(input_changeset, input_name, input) 192 | end 193 | 194 | changeset 195 | |> Changeset.for_create(task.action) 196 | |> Map.get(:errors, []) 197 | |> Enum.filter(&(&1.field == input_name)) 198 | |> case do 199 | [] -> 200 | {:ok, changeset} 201 | 202 | errors -> 203 | Mix.shell().error(Ash.Error.error_descriptions(errors)) 204 | 205 | prompt_for_input(input_name, input_type, task, input_changeset, entity, retries - 1) 206 | end 207 | end 208 | 209 | defp describe_type(type, constraints) do 210 | if Ash.Type.composite?(type, constraints) do 211 | Ash.Type.describe(type, constraints) 212 | else 213 | Ash.Type.short_names() 214 | |> Enum.find(&(elem(&1, 1) == type)) 215 | |> case do 216 | nil -> Ash.Type.describe(type, constraints) 217 | {short, _} -> to_string(short) 218 | end 219 | end 220 | end 221 | 222 | @doc false 223 | defmacro __using__(opts) do 224 | quote generated: true do 225 | @task unquote(opts[:task]) 226 | @arg_schema @task 227 | |> ArgSchema.default() 228 | |> ArgSchema.add_switch( 229 | :input, 230 | :string, 231 | [ 232 | type: {:custom, AshOps.Task.Types, :atom, [[:json, :yaml, :interactive]]}, 233 | default: 
"interactive", 234 | required: false, 235 | doc: 236 | "Read action input from STDIN in this format. Valid options are `json`, `yaml` and `interactive`. Defaults to `interactive`." 237 | ], 238 | [:i] 239 | ) 240 | 241 | @shortdoc "Create a `#{inspect(@task.resource)}` record using the `#{@task.action.name}` action" 242 | 243 | @moduledoc """ 244 | #{@shortdoc} 245 | 246 | #{if @task.description, do: "#{@task.description}\n\n"} 247 | 248 | #{if @task.action.description, do: """ 249 | ## Action 250 | 251 | #{@task.action.description} 252 | 253 | """} 254 | ## Usage 255 | 256 | Action input can be provided via YAML or JSON on STDIN, or interactively. 257 | 258 | #{ArgSchema.usage(@task, @arg_schema)} 259 | """ 260 | use Mix.Task 261 | 262 | @requirements ["app.start"] 263 | 264 | @impl Mix.Task 265 | def run(args) do 266 | unquote(__MODULE__).run(args, @task, @arg_schema) 267 | end 268 | end 269 | end 270 | end 271 | -------------------------------------------------------------------------------- /lib/ash_ops/task/update.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.Update do 6 | @moduledoc """ 7 | Provides the implementation of the `update` mix task. 8 | 9 | This should only ever be called from the mix task itself. 
10 | """ 11 | alias Ash.{Changeset, Query, Resource.Info} 12 | alias AshOps.Task.ArgSchema 13 | require Query 14 | import AshOps.Task.Common 15 | 16 | @doc false 17 | def run(argv, task, arg_schema) do 18 | with {:ok, cfg} <- ArgSchema.parse(arg_schema, argv), 19 | {:ok, actor} <- load_actor(cfg[:actor], cfg[:tenant]), 20 | cfg <- Map.put(cfg, :actor, actor), 21 | {:ok, record} <- load_record(task, cfg), 22 | {:ok, changeset} <- read_input(record, task, cfg), 23 | {:ok, record} <- update_record(changeset, task, cfg), 24 | {:ok, output} <- serialise_record(record, task.resource, cfg) do 25 | Mix.shell().info(output) 26 | :ok 27 | else 28 | {:error, reason} -> handle_error({:error, reason}) 29 | end 30 | end 31 | 32 | defp update_record(changeset, task, cfg) do 33 | opts = 34 | cfg 35 | |> Map.take([:load, :actor, :tenant]) 36 | |> Map.put(:domain, task.domain) 37 | |> Enum.to_list() 38 | 39 | Ash.update(changeset, %{}, opts) 40 | end 41 | 42 | defp load_record(task, cfg) do 43 | opts = 44 | cfg 45 | |> Map.take([:actor, :tenant]) 46 | |> Map.put(:domain, task.domain) 47 | |> Map.put(:not_found_error?, true) 48 | |> Map.put(:authorize_with, :error) 49 | |> Enum.to_list() 50 | 51 | with {:ok, field} <- identity_or_pk_field(task.resource, cfg) do 52 | task.resource 53 | |> Query.new() 54 | |> Query.for_read(task.read_action.name) 55 | |> Query.filter_input(%{field => %{"eq" => cfg.positional_arguments.id}}) 56 | |> Ash.read_one(opts) 57 | end 58 | end 59 | 60 | defp read_input(record, task, cfg) when cfg.input == :interactive do 61 | argument_names = 62 | task.action.arguments 63 | |> Enum.filter(& &1.public?) 
64 | |> MapSet.new(& &1.name) 65 | 66 | inputs = 67 | task.action.accept 68 | |> MapSet.new() 69 | |> MapSet.difference(MapSet.new(task.action.reject)) 70 | |> MapSet.union(argument_names) 71 | 72 | changeset = 73 | record 74 | |> Changeset.new() 75 | 76 | Mix.shell().info( 77 | IO.ANSI.format([ 78 | "Updating ", 79 | :cyan, 80 | inspect(task.resource), 81 | :reset, 82 | " record using the ", 83 | :cyan, 84 | to_string(task.action.name), 85 | :reset, 86 | " action:\n" 87 | ]) 88 | ) 89 | 90 | with {:ok, changeset} <- prompt_for_inputs(inputs, task, changeset) do 91 | {:ok, Changeset.for_update(changeset, task.update)} 92 | end 93 | end 94 | 95 | defp read_input(record, task, cfg) when cfg.input == :yaml do 96 | with {:ok, input} <- read_stdin() do 97 | case YamlElixir.read_from_string(input) do 98 | {:ok, map} when is_map(map) -> 99 | changeset = 100 | record 101 | |> Changeset.new() 102 | |> Changeset.for_update(task.action.name, map) 103 | 104 | {:ok, changeset} 105 | 106 | {:ok, _other} -> 107 | {:error, "YAML input must be a map"} 108 | 109 | {:error, reason} -> 110 | {:error, reason} 111 | end 112 | end 113 | end 114 | 115 | defp read_input(record, task, cfg) when cfg.input == :json do 116 | with {:ok, input} <- read_stdin() do 117 | case Jason.decode(input) do 118 | {:ok, map} when is_map(map) -> 119 | changeset = 120 | record 121 | |> Changeset.new() 122 | |> Changeset.for_update(task.action, map) 123 | 124 | {:ok, changeset} 125 | 126 | {:ok, _other} -> 127 | {:error, "JSON input must be a map"} 128 | 129 | {:error, reason} -> 130 | {:error, reason} 131 | end 132 | end 133 | end 134 | 135 | defp read_stdin do 136 | case IO.read(:eof) do 137 | {:error, reason} -> {:error, "Unable to read input from STDIN: #{inspect(reason)}"} 138 | :eof -> {:error, "No input received on STDIN"} 139 | input -> {:ok, input} 140 | end 141 | end 142 | 143 | defp prompt_for_inputs(inputs, task, changeset) do 144 | arguments = 145 | task.action.arguments 146 | |> Enum.filter(& 
&1.public?) 147 | |> Map.new(&{&1.name, &1}) 148 | 149 | Enum.reduce_while(inputs, {:ok, changeset}, fn input_name, {:ok, changeset} -> 150 | {input_type, entity} = 151 | if is_map_key(arguments, input_name) do 152 | {:argument, Map.fetch!(arguments, input_name)} 153 | else 154 | attribute = Info.attribute(task.resource, input_name) 155 | {:attribute, attribute} 156 | end 157 | 158 | case prompt_for_input(input_name, input_type, task, changeset, entity) do 159 | {:ok, changeset} -> {:cont, {:ok, changeset}} 160 | {:error, reason} -> {:halt, {:error, reason}} 161 | end 162 | end) 163 | end 164 | 165 | defp prompt_for_input(input_name, input_type, task, changeset, entity, retries \\ 3) 166 | 167 | defp prompt_for_input(input_name, input_type, task, changeset, entity, 0) do 168 | prompt = 169 | IO.ANSI.format([ 170 | "Looks like you're having trouble updating the ", 171 | :cyan, 172 | to_string(input_name), 173 | :reset, 174 | " #{input_type}. Would you like to keep trying?" 175 | ]) 176 | |> IO.iodata_to_binary() 177 | 178 | if Mix.shell().yes?(prompt) do 179 | prompt_for_input(input_name, input_type, task, changeset, entity, 3) 180 | else 181 | {:error, "Aborted while entering `#{input_name}` #{input_type}."} 182 | end 183 | end 184 | 185 | defp prompt_for_input(input_name, input_type, task, input_changeset, entity, retries) do 186 | current_value = 187 | input_changeset 188 | |> Changeset.get_argument_or_attribute(input_name) 189 | |> inspect(syntax_colors: IO.ANSI.syntax_colors()) 190 | |> then(&(&1 <> "\n")) 191 | 192 | prompt = 193 | IO.ANSI.format([ 194 | "[", 195 | :cyan, 196 | to_string(input_name), 197 | :reset, 198 | "](", 199 | describe_type(entity.type, entity.constraints), 200 | "):" 201 | ]) 202 | |> IO.iodata_to_binary() 203 | 204 | input = 205 | (current_value <> prompt) 206 | |> Mix.shell().prompt() 207 | |> String.trim() 208 | 209 | changeset = 210 | case input_type do 211 | :attribute -> Changeset.change_attribute(input_changeset, input_name, 
input) 212 | :argument -> Changeset.set_argument(input_changeset, input_name, input) 213 | end 214 | 215 | changeset 216 | |> Changeset.for_update(task.action) 217 | |> Map.get(:errors, []) 218 | |> Enum.filter(&(&1.field == input_name)) 219 | |> case do 220 | [] -> 221 | {:ok, changeset} 222 | 223 | errors -> 224 | Mix.shell().error(Ash.Error.error_descriptions(errors)) 225 | 226 | prompt_for_input(input_name, input_type, task, input_changeset, entity, retries - 1) 227 | end 228 | end 229 | 230 | defp describe_type(type, constraints) do 231 | if Ash.Type.composite?(type, constraints) do 232 | Ash.Type.describe(type, constraints) 233 | else 234 | Ash.Type.short_names() 235 | |> Enum.find(&(elem(&1, 1) == type)) 236 | |> case do 237 | nil -> Ash.Type.describe(type, constraints) 238 | {short, _} -> to_string(short) 239 | end 240 | end 241 | end 242 | 243 | @doc false 244 | defmacro __using__(opts) do 245 | quote generated: true do 246 | @task unquote(opts[:task]) 247 | @arg_schema @task 248 | |> ArgSchema.default() 249 | |> ArgSchema.prepend_positional(:id, "A unique identifier for the record") 250 | |> ArgSchema.add_switch( 251 | :identity, 252 | :string, 253 | [ 254 | type: {:custom, AshOps.Task.Types, :identity, [@task]}, 255 | required: false, 256 | doc: "The identity to use to retrieve the record." 257 | ], 258 | [:i] 259 | ) 260 | |> ArgSchema.add_switch( 261 | :input, 262 | :string, 263 | type: {:custom, AshOps.Task.Types, :atom, [[:json, :yaml, :interactive]]}, 264 | default: "interactive", 265 | required: false, 266 | doc: 267 | "Read action input from STDIN in this format. Valid options are `json`, `yaml` and `interactive`. Defaults to `interactive`." 
268 | ) 269 | 270 | @shortdoc "Update a single `#{inspect(@task.resource)}` record using the `#{@task.action.name}` action" 271 | 272 | @moduledoc """ 273 | #{@shortdoc} 274 | 275 | #{if @task.description, do: "#{@task.description}\n\n"} 276 | #{if @task.action.description, do: """ 277 | ## Action 278 | 279 | #{@task.action.description} 280 | """} 281 | ## Usage 282 | 283 | Records are looked up by their primary key unless the `--identity` option 284 | is used. The identity must not be composite (ie only contain a single 285 | field). 286 | 287 | Matching records are updated using input provided as YAML or JSON on 288 | STDIN or interactively. 289 | 290 | #{ArgSchema.usage(@task, @arg_schema)} 291 | """ 292 | use Mix.Task 293 | 294 | @requirements ["app.start"] 295 | 296 | @impl Mix.Task 297 | def run(args) do 298 | unquote(__MODULE__).run(args, @task, @arg_schema) 299 | end 300 | end 301 | end 302 | end 303 | -------------------------------------------------------------------------------- /lib/ash_ops/task/arg_schema.ex: -------------------------------------------------------------------------------- 1 | # SPDX-FileCopyrightText: 2025 ash_ops contributors 2 | # 3 | # SPDX-License-Identifier: MIT 4 | 5 | defmodule AshOps.Task.ArgSchema do 6 | @moduledoc """ 7 | A struct which contains information about the arguments a task expects to 8 | receive. 9 | """ 10 | 11 | defstruct [:op_schema, :so_schema] 12 | 13 | @type t :: %__MODULE__{ 14 | op_schema: OptionParser.options(), 15 | so_schema: Spark.Options.schema() 16 | } 17 | 18 | @doc """ 19 | Return the default arguments that all tasks expect to take. 
20 | """ 21 | @spec default(AshOps.entity()) :: t 22 | def default(task) do 23 | %__MODULE__{ 24 | op_schema: [ 25 | aliases: [ 26 | a: :actor, 27 | f: :format, 28 | l: :load, 29 | t: :tenant 30 | ], 31 | strict: [ 32 | actor: :string, 33 | format: :string, 34 | load: :string, 35 | tenant: :string 36 | ] 37 | ], 38 | so_schema: [ 39 | actor: [ 40 | type: {:custom, AshOps.Task.Types, :actor, [task]}, 41 | required: false, 42 | doc: 43 | "Specify the actor to use for the request in the format `resource:id`, eg: `Example.Accounts.User:abc123`." 44 | ], 45 | format: [ 46 | type: {:custom, AshOps.Task.Types, :atom, [[:json, :yaml]]}, 47 | required: false, 48 | default: "yaml", 49 | doc: 50 | "The output format to display the result in. Either `json` or `yaml`. Defaults to `:yaml`" 51 | ], 52 | load: [ 53 | type: {:custom, AshOps.Task.Types, :load, [task]}, 54 | required: false, 55 | doc: 56 | "An optional load query as a comma separated list of fields, fields can be nested with dots." 57 | ], 58 | tenant: [ 59 | type: :string, 60 | required: false, 61 | doc: "Specify a tenant to use when executing the query." 62 | ], 63 | positional_arguments: [ 64 | type: {:custom, AshOps.Task.Types, :positional_arguments, [task, [], []]}, 65 | required: true 66 | ] 67 | ] 68 | } 69 | end 70 | 71 | @doc """ 72 | Prepend a positional argument to the beginning argument list 73 | 74 | ie before any action arguments taken by the task. 75 | """ 76 | @spec prepend_positional(t, atom, String.t()) :: t 77 | def prepend_positional(arg_schema, name, help_text) do 78 | arg_schema 79 | |> update_positional_args(fn before_args, after_args -> 80 | {[{name, help_text} | before_args], after_args} 81 | end) 82 | end 83 | 84 | @doc """ 85 | Append a positional argument to the end of the argument list 86 | 87 | ie after any action arguments taken by the task. 
88 | """ 89 | @spec append_positional(t, atom, String.t()) :: t 90 | def append_positional(arg_schema, name, help_text) do 91 | arg_schema 92 | |> update_positional_args(fn before_args, after_args -> 93 | {before_args, Enum.concat(after_args, [{name, help_text}])} 94 | end) 95 | end 96 | 97 | @doc """ 98 | Remove a positional argument by name. 99 | """ 100 | @spec remove_positional(t, atom) :: t 101 | def remove_positional(arg_schema, name) do 102 | arg_schema 103 | |> update_positional_args(fn before_args, after_args -> 104 | {Keyword.delete(before_args, name), Keyword.delete(after_args, name)} 105 | end) 106 | end 107 | 108 | @doc """ 109 | Add a switch to the arguments 110 | 111 | ## Arguments 112 | 113 | - `name` the name of the switch - this will be dasherised by `OptionParser`. 114 | - `op_type` the type to cast the argument to (as per `OptionParser.parse/2`). 115 | - `so_schema` the `Spark.Options` schema fragment for to validate the resulting input. 116 | - `help_text` the text to display when asked to render usage information. 117 | - `aliases` a list of "short name" aliases for the switch. 
118 |   """
119 |   @spec add_switch(t, atom, atom, keyword(), [atom]) :: t
120 |   def add_switch(arg_schema, name, op_type, so_schema, aliases \\ []) do
121 |     # Register the switch (and any short aliases) with the OptionParser schema.
122 |     op_schema =
123 |       arg_schema.op_schema
124 |       |> Keyword.update!(:strict, &Keyword.put(&1, name, op_type))
125 |       |> Keyword.update!(:aliases, fn existing ->
126 |         Enum.reduce(aliases, existing, fn short, acc -> Keyword.put(acc, short, name) end)
127 |       end)
128 | 
129 |     %{
130 |       arg_schema
131 |       | op_schema: op_schema,
132 |         so_schema: Keyword.put(arg_schema.so_schema, name, so_schema)
133 |     }
134 |   end
135 | 
136 |   @doc """
137 |   Remove a switch from the argument schemas
138 |   """
139 |   @spec remove_switch(t, atom) :: t
140 |   def remove_switch(arg_schema, name) do
141 |     # Drop the switch from the strict schema and discard any aliases pointing at it.
142 |     op_schema =
143 |       arg_schema.op_schema
144 |       |> Keyword.update!(:strict, &Keyword.delete(&1, name))
145 |       |> Keyword.update!(:aliases, fn aliases ->
146 |         for {short, target} <- aliases, target != name, do: {short, target}
147 |       end)
148 | 
149 |     %{arg_schema | op_schema: op_schema, so_schema: Keyword.delete(arg_schema.so_schema, name)}
150 |   end
151 | 
152 |   # Rewrite the `positional_arguments` custom type entry in the Spark.Options
153 |   # schema by passing the current before/after argument lists to `updater`.
154 |   defp update_positional_args(arg_schema, updater) when is_function(updater, 2) do
155 |     Map.update!(arg_schema, :so_schema, fn schema ->
156 |       Keyword.update!(schema, :positional_arguments, fn positional ->
157 |         # credo:disable-for-next-line Credo.Check.Refactor.Nesting
158 |         Keyword.update!(positional, :type, fn
159 |           {:custom, AshOps.Task.Types, :positional_arguments, [task, leading, trailing]} ->
160 |             {leading, trailing} = updater.(leading, trailing)
161 |             {:custom, AshOps.Task.Types, :positional_arguments, [task, leading, trailing]}
162 |         end)
163 |       end)
164 |     end)
165 |   end
166 | 
167 |   @doc """
168 |   Parse and validate the command-line arguments.
170 | """ 171 | @spec parse(t, OptionParser.argv()) :: {:ok, %{atom => any}} | {:error, any} 172 | def parse(arg_schema, argv) do 173 | with {:ok, parsed} <- parse_args(argv, arg_schema.op_schema), 174 | {:ok, valid} <- Spark.Options.validate(parsed, arg_schema.so_schema) do 175 | {:ok, Map.new(valid)} 176 | end 177 | end 178 | 179 | @doc """ 180 | Display usage information about the arguments 181 | """ 182 | @spec usage(AshOps.entity(), t) :: String.t() 183 | def usage(task, arg_schema) do 184 | [ 185 | """ 186 | ## Example 187 | 188 | ```bash 189 | #{example_usage(task, arg_schema)} 190 | ``` 191 | """, 192 | if has_positional_args?(arg_schema) do 193 | """ 194 | 195 | ## Command line arguments 196 | 197 | #{positional_argument_usage(arg_schema)} 198 | """ 199 | end, 200 | if has_switches?(arg_schema) do 201 | """ 202 | 203 | ## Command line options 204 | 205 | #{switch_usage(arg_schema)} 206 | """ 207 | end 208 | ] 209 | |> Enum.map_join("\n", &to_string/1) 210 | end 211 | 212 | defp has_positional_args?(arg_schema) do 213 | arg_schema.so_schema 214 | |> Keyword.get(:positional_arguments, []) 215 | |> Keyword.get(:type, {:custom, AshOps.Task.Types, :positional_arguments, [nil, [], []]}) 216 | |> elem(3) 217 | |> Enum.drop(1) 218 | |> Enum.concat() 219 | |> Enum.any?() 220 | end 221 | 222 | defp has_switches?(arg_schema) do 223 | arg_schema.so_schema 224 | |> Keyword.delete(:positional_arguments) 225 | |> Enum.any?() 226 | end 227 | 228 | defp example_usage(task, arg_schema) do 229 | underscored_domain = 230 | task.domain 231 | |> Module.split() 232 | |> List.last() 233 | |> Macro.underscore() 234 | 235 | positional_args = 236 | arg_schema 237 | |> extract_positional_args() 238 | |> Enum.map(fn {name, _} -> 239 | name 240 | |> to_string() 241 | |> String.upcase() 242 | end) 243 | 244 | ["mix #{task.prefix}.#{underscored_domain}.#{task.name}" | positional_args] 245 | |> Enum.join(" ") 246 | end 247 | 248 | defp extract_positional_args(arg_schema) do 249 | 
{:custom, AshOps.Task.Types, :positional_arguments, [task, before_args, after_args]} = 250 | arg_schema 251 | |> Map.fetch!(:so_schema) 252 | |> Keyword.fetch!(:positional_arguments) 253 | |> Keyword.fetch!(:type) 254 | 255 | mid_args = 256 | task.arguments 257 | |> Enum.map(fn arg -> 258 | description = 259 | task.action.arguments 260 | |> Enum.find_value("Argument to the `#{task.action.name}` action", fn action_arg -> 261 | action_arg.name == arg && action_arg.description 262 | end) 263 | 264 | {arg, description} 265 | end) 266 | 267 | before_args 268 | |> Enum.concat(mid_args) 269 | |> Enum.concat(after_args) 270 | end 271 | 272 | defp positional_argument_usage(arg_schema) do 273 | arg_schema 274 | |> extract_positional_args() 275 | |> Enum.map_join("\n", fn {name, description} -> 276 | name = 277 | name 278 | |> to_string() 279 | |> String.upcase() 280 | 281 | " * `#{name}` - #{description}" 282 | end) 283 | end 284 | 285 | defp switch_usage(arg_schema) do 286 | arg_schema.op_schema 287 | |> Keyword.fetch!(:strict) 288 | |> Enum.map_join("\n", fn 289 | {name, type} -> 290 | underscored = 291 | name 292 | |> to_string() 293 | |> String.replace("_", "-") 294 | 295 | aliases = 296 | arg_schema.op_schema 297 | |> Keyword.fetch!(:aliases) 298 | |> Enum.filter(&(elem(&1, 1) == name)) 299 | |> Enum.map(&"`-#{elem(&1, 0)}`") 300 | 301 | aliases = 302 | if type == :boolean do 303 | ["`--no-#{underscored}`" | aliases] 304 | else 305 | aliases 306 | end 307 | 308 | aliases = 309 | if Enum.any?(aliases) do 310 | "(#{Enum.join(aliases, ", ")}) " 311 | end 312 | 313 | help_text = 314 | arg_schema.so_schema 315 | |> Keyword.fetch!(name) 316 | |> Keyword.fetch!(:doc) 317 | 318 | " * `--#{underscored}` #{aliases}- #{help_text}" 319 | end) 320 | end 321 | 322 | defp parse_args(argv, op_schema) do 323 | case OptionParser.parse(argv, op_schema) do 324 | {parsed, argv, []} -> 325 | args = 326 | parsed 327 | |> Keyword.put(:positional_arguments, argv) 328 | 329 | {:ok, args} 330 | 
331 |       {_, _, errors} ->
332 |         {:error, "Unable to parse arguments: `#{inspect(errors)}`"}
333 |     end
334 |   end
335 | end
336 | 
--------------------------------------------------------------------------------
/documentation/dsls/DSL-AshOps.md:
--------------------------------------------------------------------------------
1 | 
4 | # AshOps
5 | 
6 | An extension for `Ash.Domain` that adds the ability to expose resource actions as
7 | mix tasks.
8 | 
9 | 
10 | ## mix_tasks
11 | Resource actions to expose as mix tasks.
12 | 
13 | 
14 | ### Nested DSLs
15 | * [action](#mix_tasks-action)
16 | * [create](#mix_tasks-create)
17 | * [destroy](#mix_tasks-destroy)
18 | * [get](#mix_tasks-get)
19 | * [list](#mix_tasks-list)
20 | * [update](#mix_tasks-update)
21 | 
22 | 
23 | ### Examples
24 | ```
25 | mix_tasks do
26 | action Post, :publish_post, :publish
27 | create Post, :create_post, :create
28 | destroy Post, :destroy_post, :destroy
29 | get Post, :get_post, :read
30 | list Post, :list_posts, :read
31 | update Post, :update_post, :update
32 | end
33 | 
34 | ```
35 | 
36 | 
37 | 
38 | 
39 | ### mix_tasks.action
40 | ```elixir
41 | action resource, name, action
42 | ```
43 | 
44 | 
45 | Generate a mix task which calls a generic action and returns the result of
46 | the action.
47 | 48 | #### Example 49 | 50 | Defining the following `action` in your domain: 51 | 52 | ```elixir 53 | mix_tasks do 54 | action Post, :publish_post, :publish, arguments: [:id, :platform] 55 | end 56 | ``` 57 | 58 | Will result in the following mix task being available: 59 | 60 | ```bash 61 | mix my_app.blog.publish_post 62 | ``` 63 | 64 | 65 | 66 | 67 | 68 | 69 | ### Arguments 70 | 71 | | Name | Type | Default | Docs | 72 | |------|------|---------|------| 73 | | [`resource`](#mix_tasks-action-resource){: #mix_tasks-action-resource .spark-required} | `module` | | The resource whose actions to use | 74 | | [`name`](#mix_tasks-action-name){: #mix_tasks-action-name .spark-required} | `atom` | | The name of the mix task to generate | 75 | | [`action`](#mix_tasks-action-action){: #mix_tasks-action-action .spark-required} | `atom` | | The name of the action to use | 76 | ### Options 77 | 78 | | Name | Type | Default | Docs | 79 | |------|------|---------|------| 80 | | [`arguments`](#mix_tasks-action-arguments){: #mix_tasks-action-arguments } | `atom \| list(atom)` | `[]` | A list of action arguments which should be taken as positional arguments on the command line | 81 | | [`description`](#mix_tasks-action-description){: #mix_tasks-action-description } | `String.t` | | Documentation to be displayed in the mix task's help section | 82 | | [`prefix`](#mix_tasks-action-prefix){: #mix_tasks-action-prefix } | `atom` | | The prefix to use for the mix task name (ie the part before the first "."). Defaults to the `otp_app` setting of the domain | 83 | 84 | 85 | 86 | 87 | 88 | ### Introspection 89 | 90 | Target: `AshOps.Entity.Action` 91 | 92 | ### mix_tasks.create 93 | ```elixir 94 | create resource, name, action 95 | ``` 96 | 97 | 98 | Generate a mix task which calls a create action and returns the created 99 | record. 
100 | 101 | #### Example 102 | 103 | Defining the following `create` in your domain: 104 | 105 | ```elixir 106 | mix_tasks do 107 | create Post, :create_post, :create 108 | end 109 | ``` 110 | 111 | Will result in the following mix task being available: 112 | 113 | ```bash 114 | mix my_app.blog.create_post 115 | ``` 116 | 117 | 118 | 119 | 120 | 121 | 122 | ### Arguments 123 | 124 | | Name | Type | Default | Docs | 125 | |------|------|---------|------| 126 | | [`resource`](#mix_tasks-create-resource){: #mix_tasks-create-resource .spark-required} | `module` | | The resource whose actions to use | 127 | | [`name`](#mix_tasks-create-name){: #mix_tasks-create-name .spark-required} | `atom` | | The name of the mix task to generate | 128 | | [`action`](#mix_tasks-create-action){: #mix_tasks-create-action .spark-required} | `atom` | | The name of the create action to use | 129 | ### Options 130 | 131 | | Name | Type | Default | Docs | 132 | |------|------|---------|------| 133 | | [`description`](#mix_tasks-create-description){: #mix_tasks-create-description } | `String.t` | | Documentation to be displayed in the mix task's help section | 134 | | [`prefix`](#mix_tasks-create-prefix){: #mix_tasks-create-prefix } | `atom` | | The prefix to use for the mix task name (ie the part before the first "."). Defaults to the `otp_app` setting of the domain | 135 | 136 | 137 | 138 | 139 | 140 | ### Introspection 141 | 142 | Target: `AshOps.Entity.Create` 143 | 144 | ### mix_tasks.destroy 145 | ```elixir 146 | destroy resource, name, action 147 | ``` 148 | 149 | 150 | Generate a mix task which calls a destroy action and removes a single record 151 | by primary key or identity. 
152 | 153 | #### Example 154 | 155 | Defining the following `destroy` in your domain: 156 | 157 | ```elixir 158 | mix_tasks do 159 | destroy Post, :destroy_post, :destroy 160 | end 161 | ``` 162 | 163 | Will result in the following mix task being available: 164 | 165 | ```bash 166 | mix my_app.blog.destroy_post "01953abc-c4e9-7661-a79a-243b0d982ab7" 167 | status: ok 168 | ``` 169 | 170 | 171 | 172 | 173 | 174 | 175 | ### Arguments 176 | 177 | | Name | Type | Default | Docs | 178 | |------|------|---------|------| 179 | | [`resource`](#mix_tasks-destroy-resource){: #mix_tasks-destroy-resource .spark-required} | `module` | | The resource whose action to use | 180 | | [`name`](#mix_tasks-destroy-name){: #mix_tasks-destroy-name .spark-required} | `atom` | | The name of the mix task to generate | 181 | | [`action`](#mix_tasks-destroy-action){: #mix_tasks-destroy-action .spark-required} | `atom` | | The name of the destroy action to use | 182 | ### Options 183 | 184 | | Name | Type | Default | Docs | 185 | |------|------|---------|------| 186 | | [`arguments`](#mix_tasks-destroy-arguments){: #mix_tasks-destroy-arguments } | `atom \| list(atom)` | `[]` | A list of action arguments which should be taken as positional arguments on the command line | 187 | | [`description`](#mix_tasks-destroy-description){: #mix_tasks-destroy-description } | `String.t` | | Documentation to be displayed in the mix task's help section | 188 | | [`prefix`](#mix_tasks-destroy-prefix){: #mix_tasks-destroy-prefix } | `atom` | | The prefix to use for the mix task name (ie the part before the first "."). Defaults to the `otp_app` setting of the domain | 189 | | [`read_action`](#mix_tasks-destroy-read_action){: #mix_tasks-destroy-read_action } | `atom` | | The read action to use to query for matching records to destroy. Defaults to the primary read action. 
| 190 | 191 | 192 | 193 | 194 | 195 | ### Introspection 196 | 197 | Target: `AshOps.Entity.Destroy` 198 | 199 | ### mix_tasks.get 200 | ```elixir 201 | get resource, name, action 202 | ``` 203 | 204 | 205 | Generate a mix task which calls a read action and returns a single record 206 | by primary key or identity. 207 | 208 | #### Example 209 | 210 | Defining the following `get` in your domain: 211 | 212 | ```elixir 213 | mix_tasks do 214 | get Post, :get_post, :read 215 | end 216 | ``` 217 | 218 | Will result in the following mix task being available: 219 | 220 | ```bash 221 | mix my_app.blog.get_post "01953abc-c4e9-7661-a79a-243b0d982ab7" 222 | title: Example blog post 223 | body: This is the example blog post 224 | ``` 225 | 226 | 227 | 228 | 229 | 230 | 231 | ### Arguments 232 | 233 | | Name | Type | Default | Docs | 234 | |------|------|---------|------| 235 | | [`resource`](#mix_tasks-get-resource){: #mix_tasks-get-resource .spark-required} | `module` | | The resource whose action to use | 236 | | [`name`](#mix_tasks-get-name){: #mix_tasks-get-name .spark-required} | `atom` | | The name of the mix task to generate | 237 | | [`action`](#mix_tasks-get-action){: #mix_tasks-get-action .spark-required} | `atom` | | The name of the read action to use | 238 | ### Options 239 | 240 | | Name | Type | Default | Docs | 241 | |------|------|---------|------| 242 | | [`arguments`](#mix_tasks-get-arguments){: #mix_tasks-get-arguments } | `atom \| list(atom)` | `[]` | A list of action arguments which should be taken as positional arguments on the command line | 243 | | [`description`](#mix_tasks-get-description){: #mix_tasks-get-description } | `String.t` | | Documentation to be displayed in the mix task's help section | 244 | | [`prefix`](#mix_tasks-get-prefix){: #mix_tasks-get-prefix } | `atom` | | The prefix to use for the mix task name (ie the part before the first "."). 
Defaults to the `otp_app` setting of the domain |
245 | 
246 | 
247 | 
248 | 
249 | 
250 | ### Introspection
251 | 
252 | Target: `AshOps.Entity.Get`
253 | 
254 | ### mix_tasks.list
255 | ```elixir
256 | list resource, name, action
257 | ```
258 | 
259 | 
260 | Generate a mix task which calls a read action and returns any matching records.
261 | 
262 | #### Example
263 | 
264 | Defining the following `list` in your domain:
265 | 
266 | ```elixir
267 | mix_tasks do
268 | list Post, :list_posts, :read
269 | end
270 | ```
271 | 
272 | Will result in the following mix task being available:
273 | 
274 | ```bash
275 | mix my_app.blog.list_posts
276 | ```
277 | 
278 | 
279 | 
280 | 
281 | 
282 | 
283 | ### Arguments
284 | 
285 | | Name | Type | Default | Docs |
286 | |------|------|---------|------|
287 | | [`resource`](#mix_tasks-list-resource){: #mix_tasks-list-resource .spark-required} | `module` | | The resource whose action to use |
288 | | [`name`](#mix_tasks-list-name){: #mix_tasks-list-name .spark-required} | `atom` | | The name of the mix task to generate |
289 | | [`action`](#mix_tasks-list-action){: #mix_tasks-list-action .spark-required} | `atom` | | The name of the read action to use |
290 | ### Options
291 | 
292 | | Name | Type | Default | Docs |
293 | |------|------|---------|------|
294 | | [`arguments`](#mix_tasks-list-arguments){: #mix_tasks-list-arguments } | `atom \| list(atom)` | | A list of action arguments which should be taken as positional arguments on the command line |
295 | | [`description`](#mix_tasks-list-description){: #mix_tasks-list-description } | `String.t` | | Documentation to be displayed in the mix task's help section |
296 | | [`prefix`](#mix_tasks-list-prefix){: #mix_tasks-list-prefix } | `atom` | | The prefix to use for the mix task name (ie the part before the first ".").
Defaults to the `otp_app` setting of the domain |
297 | 
298 | 
299 | 
300 | 
301 | 
302 | ### Introspection
303 | 
304 | Target: `AshOps.Entity.List`
305 | 
306 | ### mix_tasks.update
307 | ```elixir
308 | update resource, name, action
309 | ```
310 | 
311 | 
312 | Generate a mix task which calls an update action and updates a single record
313 | by primary key or identity.
314 | 
315 | #### Example
316 | 
317 | Defining the following `update` in your domain:
318 | 
319 | ```elixir
320 | mix_tasks do
321 | update Post, :update_post, :update
322 | end
323 | ```
324 | 
325 | Will result in the following mix task being available:
326 | 
327 | ```bash
328 | mix my_app.blog.update_post "01953abc-c4e9-7661-a79a-243b0d982ab7"
329 | ```
330 | 
331 | 
332 | 
333 | 
334 | 
335 | 
336 | ### Arguments
337 | 
338 | | Name | Type | Default | Docs |
339 | |------|------|---------|------|
340 | | [`resource`](#mix_tasks-update-resource){: #mix_tasks-update-resource .spark-required} | `module` | | The resource whose action to use |
341 | | [`name`](#mix_tasks-update-name){: #mix_tasks-update-name .spark-required} | `atom` | | The name of the mix task to generate |
342 | | [`action`](#mix_tasks-update-action){: #mix_tasks-update-action .spark-required} | `atom` | | The name of the update action to use |
343 | ### Options
344 | 
345 | | Name | Type | Default | Docs |
346 | |------|------|---------|------|
347 | | [`arguments`](#mix_tasks-update-arguments){: #mix_tasks-update-arguments } | `atom \| list(atom)` | `[]` | A list of action arguments which should be taken as positional arguments on the command line |
348 | | [`description`](#mix_tasks-update-description){: #mix_tasks-update-description } | `String.t` | | Documentation to be displayed in the mix task's help section |
349 | | [`prefix`](#mix_tasks-update-prefix){: #mix_tasks-update-prefix } | `atom` | | The prefix to use for the mix task name (ie the part before the first ".").
Defaults to the `otp_app` setting of the domain | 350 | | [`read_action`](#mix_tasks-update-read_action){: #mix_tasks-update-read_action } | `atom` | | The read action to use to query for matching records to update. Defaults to the primary read action. | 351 | 352 | 353 | 354 | 355 | 356 | ### Introspection 357 | 358 | Target: `AshOps.Entity.Update` 359 | 360 | 361 | 362 | 363 | 364 | 365 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "ash": {:hex, :ash, "3.10.0", "839d696ef8a4d1f5b980a469fb19ef1383f21ddfb0e602ef91fc9811b2be529a", [:mix], [{:crux, ">= 0.1.2 and < 1.0.0-0", [hex: :crux, repo: "hexpm", optional: false]}, {:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:ecto, "~> 3.7", [hex: :ecto, repo: "hexpm", optional: false]}, {:ets, "~> 0.8", [hex: :ets, repo: "hexpm", optional: false]}, {:igniter, ">= 0.6.29 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, ">= 1.0.0", [hex: :jason, repo: "hexpm", optional: false]}, {:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: true]}, {:plug, ">= 0.0.0", [hex: :plug, repo: "hexpm", optional: true]}, {:reactor, "~> 0.11", [hex: :reactor, repo: "hexpm", optional: false]}, {:simple_sat, ">= 0.1.1 and < 1.0.0-0", [hex: :simple_sat, repo: "hexpm", optional: true]}, {:spark, ">= 2.3.14 and < 3.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, ">= 0.2.6 and < 1.0.0-0", [hex: :splode, repo: "hexpm", optional: false]}, {:stream_data, "~> 1.0", [hex: :stream_data, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.1", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "04b722edb6f8674fbe6ee7833e7e7ca43c404635e748bc4d17a6a1dba288dfc7"}, 3 | "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", 
"dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, 4 | "credo": {:hex, :credo, "1.7.13", "126a0697df6b7b71cd18c81bc92335297839a806b6f62b61d417500d1070ff4e", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "47641e6d2bbff1e241e87695b29f617f1a8f912adea34296fb10ecc3d7e9e84f"}, 5 | "crux": {:hex, :crux, "0.1.2", "4441c9e3a34f1e340954ce96b9ad5a2de13ceb4f97b3f910211227bb92e2ca90", [:mix], [{:picosat_elixir, "~> 0.2", [hex: :picosat_elixir, repo: "hexpm", optional: true]}, {:simple_sat, ">= 0.1.1 and < 1.0.0-0", [hex: :simple_sat, repo: "hexpm", optional: true]}, {:stream_data, "~> 1.0", [hex: :stream_data, repo: "hexpm", optional: true]}], "hexpm", "563ea3748ebfba9cc078e6d198a1d6a06015a8fae503f0b721363139f0ddb350"}, 6 | "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"}, 7 | "dialyxir": {:hex, :dialyxir, "1.4.7", "dda948fcee52962e4b6c5b4b16b2d8fa7d50d8645bbae8b8685c3f9ecb7f5f4d", [:mix], [{:erlex, ">= 0.2.8", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b34527202e6eb8cee198efec110996c25c5898f43a4094df157f8d28f27d9efe"}, 8 | "doctor": {:hex, :doctor, "0.22.0", "223e1cace1f16a38eda4113a5c435fa9b10d804aa72d3d9f9a71c471cc958fe7", [:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm", "96e22cf8c0df2e9777dc55ebaa5798329b9028889c4023fed3305688d902cd5b"}, 9 | "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"}, 10 | "ecto": {:hex, :ecto, "3.13.5", "9d4a69700183f33bf97208294768e561f5c7f1ecf417e0fa1006e4a91713a834", 
[:mix], [{:decimal, "~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "df9efebf70cf94142739ba357499661ef5dbb559ef902b68ea1f3c1fabce36de"}, 11 | "erlex": {:hex, :erlex, "0.2.8", "cd8116f20f3c0afe376d1e8d1f0ae2452337729f68be016ea544a72f767d9c12", [:mix], [], "hexpm", "9d66ff9fedf69e49dc3fd12831e12a8a37b76f8651dd21cd45fcf5561a8a7590"}, 12 | "ets": {:hex, :ets, "0.9.0", "79c6a6c205436780486f72d84230c6cba2f8a9920456750ddd1e47389107d5fd", [:mix], [], "hexpm", "2861fdfb04bcaeff370f1a5904eec864f0a56dcfebe5921ea9aadf2a481c822b"}, 13 | "ex_check": {:hex, :ex_check, "0.16.0", "07615bef493c5b8d12d5119de3914274277299c6483989e52b0f6b8358a26b5f", [:mix], [], "hexpm", "4d809b72a18d405514dda4809257d8e665ae7cf37a7aee3be6b74a34dec310f5"}, 14 | "ex_doc": {:hex, :ex_doc, "0.39.1", "e19d356a1ba1e8f8cfc79ce1c3f83884b6abfcb79329d435d4bbb3e97ccc286e", [:mix], [{:earmark_parser, "~> 1.4.44", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "8abf0ed3e3ca87c0847dfc4168ceab5bedfe881692f1b7c45f4a11b232806865"}, 15 | "faker": {:hex, :faker, "0.18.0", "943e479319a22ea4e8e39e8e076b81c02827d9302f3d32726c5bf82f430e6e14", [:mix], [], "hexpm", "bfbdd83958d78e2788e99ec9317c4816e651ad05e24cfd1196ce5db5b3e81797"}, 16 | "file_system": {:hex, :file_system, "1.1.1", "31864f4685b0148f25bd3fbef2b1228457c0c89024ad67f7a81a3ffbc0bbad3a", [:mix], [], "hexpm", "7a15ff97dfe526aeefb090a7a9d3d03aa907e100e262a0f8f7746b78f8f87a5d"}, 17 | "finch": {:hex, :finch, "0.20.0", 
"5330aefb6b010f424dcbbc4615d914e9e3deae40095e73ab0c1bb0968933cadf", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2658131a74d051aabfcba936093c903b8e89da9a1b63e430bee62045fa9b2ee2"}, 18 | "git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"}, 19 | "git_ops": {:hex, :git_ops, "2.9.0", "b74f6040084f523055b720cc7ef718da47f2cbe726a5f30c2871118635cb91c1", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:igniter, ">= 0.5.27 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: false]}, {:req, "~> 0.5", [hex: :req, repo: "hexpm", optional: false]}], "hexpm", "7fdf84be3490e5692c5dc1f8a1084eed47a221c1063e41938c73312f0bfea259"}, 20 | "glob_ex": {:hex, :glob_ex, "0.1.11", "cb50d3f1ef53f6ca04d6252c7fde09fd7a1cf63387714fe96f340a1349e62c93", [:mix], [], "hexpm", "342729363056e3145e61766b416769984c329e4378f1d558b63e341020525de4"}, 21 | "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"}, 22 | "igniter": {:hex, :igniter, "0.7.0", "6848714fa5afa14258c82924a57af9364745316241a409435cf39cbe11e3ae80", [:mix], [{:glob_ex, "~> 0.1.7", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:owl, "~> 0.11", [hex: :owl, repo: "hexpm", optional: false]}, {:phx_new, 
"~> 1.7", [hex: :phx_new, repo: "hexpm", optional: true]}, {:req, "~> 0.5", [hex: :req, repo: "hexpm", optional: false]}, {:rewrite, ">= 1.1.1 and < 2.0.0-0", [hex: :rewrite, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.4", [hex: :sourceror, repo: "hexpm", optional: false]}, {:spitfire, ">= 0.1.3 and < 1.0.0-0", [hex: :spitfire, repo: "hexpm", optional: false]}], "hexpm", "1e7254780dbf4b44c9eccd6d86d47aa961efc298d7f520c24acb0258c8e90ba9"}, 23 | "iterex": {:hex, :iterex, "0.1.2", "58f9b9b9a22a55cbfc7b5234a9c9c63eaac26d276b3db80936c0e1c60355a5a6", [:mix], [], "hexpm", "2e103b8bcc81757a9af121f6dc0df312c9a17220f302b1193ef720460d03029d"}, 24 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, 25 | "libgraph": {:hex, :libgraph, "0.16.0", "3936f3eca6ef826e08880230f806bfea13193e49bf153f93edcf0239d4fd1d07", [:mix], [], "hexpm", "41ca92240e8a4138c30a7e06466acc709b0cbb795c643e9e17174a178982d6bf"}, 26 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, 27 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, 28 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", 
"af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, 29 | "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"}, 30 | "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"}, 31 | "mix_audit": {:hex, :mix_audit, "2.1.5", "c0f77cee6b4ef9d97e37772359a187a166c7a1e0e08b50edf5bf6959dfe5a016", [:make, :mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "87f9298e21da32f697af535475860dc1d3617a010e0b418d2ec6142bc8b42d69"}, 32 | "neotoma": {:hex, :neotoma, "1.7.3", "d8bd5404b73273989946e4f4f6d529e5c2088f5fa1ca790b4dbe81f4be408e61", [:rebar], [], "hexpm", "2da322b9b1567ffa0706a7f30f6bbbde70835ae44a1050615f4b4a3d436e0f28"}, 33 | "neotoma_compiler": {:hex, :neotoma_compiler, "0.1.2", "6b42c1841e8b47a80a47666cb9febe777e3209566d6c24fd93cd04ff073823f2", [:mix], [{:neotoma, "~> 1.7", [hex: :neotoma, repo: "hexpm", optional: false]}], "hexpm", "6cf262f9bdc89a2c5610592e7608a18bbac62a1e6d9e27f830e5f010b5219638"}, 34 | "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"}, 35 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, 36 | "nimble_pool": {:hex, :nimble_pool, "1.1.0", 
"bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"}, 37 | "owl": {:hex, :owl, "0.13.0", "26010e066d5992774268f3163506972ddac0a7e77bfe57fa42a250f24d6b876e", [:mix], [{:ucwidth, "~> 0.2", [hex: :ucwidth, repo: "hexpm", optional: true]}], "hexpm", "59bf9d11ce37a4db98f57cb68fbfd61593bf419ec4ed302852b6683d3d2f7475"}, 38 | "reactor": {:hex, :reactor, "0.17.0", "eb8bdb530dbae824e2d36a8538f8ec4f3aa7c2d1b61b04959fa787c634f88b49", [:mix], [{:igniter, "~> 0.4", [hex: :igniter, repo: "hexpm", optional: true]}, {:iterex, "~> 0.1", [hex: :iterex, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:libgraph, "~> 0.16", [hex: :libgraph, repo: "hexpm", optional: false]}, {:spark, ">= 2.3.3 and < 3.0.0-0", [hex: :spark, repo: "hexpm", optional: false]}, {:splode, "~> 0.2", [hex: :splode, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.2", [hex: :telemetry, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}, {:ymlr, "~> 5.0", [hex: :ymlr, repo: "hexpm", optional: false]}], "hexpm", "3c3bf71693adbad9117b11ec83cfed7d5851b916ade508ed9718de7ae165bf25"}, 39 | "req": {:hex, :req, "0.5.16", "99ba6a36b014458e52a8b9a0543bfa752cb0344b2a9d756651db1281d4ba4450", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "974a7a27982b9b791df84e8f6687d21483795882a7840e8309abdbe08bb06f09"}, 40 | "rewrite": {:hex, :rewrite, "1.2.0", 
"80220eb14010e175b67c939397e1a8cdaa2c32db6e2e0a9d5e23e45c0414ce21", [:mix], [{:glob_ex, "~> 0.1", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.0", [hex: :sourceror, repo: "hexpm", optional: false]}, {:text_diff, "~> 0.1", [hex: :text_diff, repo: "hexpm", optional: false]}], "hexpm", "a1cd702bbb9d51613ab21091f04a386d750fc6f4516b81900df082d78b2d8c50"}, 41 | "simple_sat": {:hex, :simple_sat, "0.1.4", "39baf72cdca14f93c0b6ce2b6418b72bbb67da98fa9ca4384e2f79bbc299899d", [:mix], [], "hexpm", "3569b68e346a5fd7154b8d14173ff8bcc829f2eb7b088c30c3f42a383443930b"}, 42 | "sobelow": {:hex, :sobelow, "0.14.1", "2f81e8632f15574cba2402bcddff5497b413c01e6f094bc0ab94e83c2f74db81", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "8fac9a2bd90fdc4b15d6fca6e1608efb7f7c600fa75800813b794ee9364c87f2"}, 43 | "sourceror": {:hex, :sourceror, "1.10.0", "38397dedbbc286966ec48c7af13e228b171332be1ad731974438c77791945ce9", [:mix], [], "hexpm", "29dbdfc92e04569c9d8e6efdc422fc1d815f4bd0055dc7c51b8800fb75c4b3f1"}, 44 | "spark": {:hex, :spark, "2.3.14", "a08420d08e6e0e49d740aed3e160f1cb894ba8f6b3f5e6c63253e9df1995265c", [:mix], [{:igniter, ">= 0.3.64 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: true]}, {:sourceror, "~> 1.2", [hex: :sourceror, repo: "hexpm", optional: true]}], "hexpm", "af50c4ea5dd67eba822247f1c98e1d4e598cb7f6c28ccf5d002f0e0718096f4f"}, 45 | "spitfire": {:hex, :spitfire, "0.2.1", "29e154873f05444669c7453d3d931820822cbca5170e88f0f8faa1de74a79b47", [:mix], [], "hexpm", "6eeed75054a38341b2e1814d41bb0a250564092358de2669fdb57ff88141d91b"}, 46 | "splode": {:hex, :splode, "0.2.9", "3a2776e187c82f42f5226b33b1220ccbff74f4bcc523dd4039c804caaa3ffdc7", [:mix], [], "hexpm", "8002b00c6e24f8bd1bcced3fbaa5c33346048047bb7e13d2f3ad428babbd95c3"}, 47 | "stream_data": {:hex, :stream_data, "1.2.0", "58dd3f9e88afe27dc38bef26fce0c84a9e7a96772b2925c7b32cd2435697a52b", 
[:mix], [], "hexpm", "eb5c546ee3466920314643edf68943a5b14b32d1da9fe01698dc92b73f89a9ed"}, 48 | "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, 49 | "text_diff": {:hex, :text_diff, "0.1.0", "1caf3175e11a53a9a139bc9339bd607c47b9e376b073d4571c031913317fecaa", [:mix], [], "hexpm", "d1ffaaecab338e49357b6daa82e435f877e0649041ace7755583a0ea3362dbd7"}, 50 | "yamerl": {:hex, :yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"}, 51 | "yaml_elixir": {:hex, :yaml_elixir, "2.12.0", "30343ff5018637a64b1b7de1ed2a3ca03bc641410c1f311a4dbdc1ffbbf449c7", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "ca6bacae7bac917a7155dca0ab6149088aa7bc800c94d0fe18c5238f53b313c6"}, 52 | "ymlr": {:hex, :ymlr, "5.1.4", "b924d61e1fc1ec371cde6ab3ccd9311110b1e052fc5c2460fb322e8380e7712a", [:mix], [], "hexpm", "75f16cf0709fbd911b30311a0359a7aa4b5476346c01882addefd5f2b1cfaa51"}, 53 | } 54 | -------------------------------------------------------------------------------- /src/ash_ops_query.erl: -------------------------------------------------------------------------------- 1 | -module(ash_ops_query). 2 | -export([parse/1,file/1]). 3 | -define(p_anything,true). 4 | -define(p_charclass,true). 5 | -define(p_choose,true). 6 | -define(p_label,true). 7 | -define(p_not,true). 8 | -define(p_one_or_more,true). 9 | -define(p_optional,true). 10 | -define(p_scan,true). 11 | -define(p_seq,true). 12 | -define(p_string,true). 13 | -define(p_zero_or_more,true). 14 | 15 | 16 | 17 | -spec file(file:name()) -> any(). 18 | file(Filename) -> case file:read_file(Filename) of {ok,Bin} -> parse(Bin); Err -> Err end. 19 | 20 | -spec parse(binary() | list()) -> any(). 
21 | parse(List) when is_list(List) -> parse(unicode:characters_to_binary(List)); 22 | parse(Input) when is_binary(Input) -> 23 | _ = setup_memo(), 24 | Result = case 'query'(Input,{{line,1},{column,1}}) of 25 | {AST, <<>>, _Index} -> AST; 26 | Any -> Any 27 | end, 28 | release_memo(), Result. 29 | 30 | -spec 'query'(input(), index()) -> parse_result(). 31 | 'query'(Input, Index) -> 32 | p(Input, Index, 'query', fun(I,D) -> (fun 'expr'/2)(I,D) end, fun(Node, Idx) ->transform('query', Node, Idx) end). 33 | 34 | -spec 'expr'(input(), index()) -> parse_result(). 35 | 'expr'(Input, Index) -> 36 | p(Input, Index, 'expr', fun(I,D) -> (p_seq([p_label('lhs', fun 'expr_single'/2), p_label('rhs', p_zero_or_more(p_seq([p_optional(fun 'space'/2), fun 'op'/2, p_optional(fun 'space'/2), fun 'expr_single'/2])))]))(I,D) end, fun(Node, _Idx) -> 37 | Lhs = proplists:get_value(lhs, Node), 38 | Rhs = proplists:get_value(rhs, Node), 39 | Rhs2 = lists:flatmap(fun([_, Op, _, E]) -> [Op, E] end, Rhs), 40 | [Lhs | Rhs2] 41 | end). 42 | 43 | -spec 'expr_single'(input(), index()) -> parse_result(). 44 | 'expr_single'(Input, Index) -> 45 | p(Input, Index, 'expr_single', fun(I,D) -> (p_choose([fun 'array'/2, fun 'braced'/2, fun 'function'/2, fun 'literal'/2, fun 'path'/2]))(I,D) end, fun(Node, Idx) ->transform('expr_single', Node, Idx) end). 46 | 47 | -spec 'braced'(input(), index()) -> parse_result(). 48 | 'braced'(Input, Index) -> 49 | p(Input, Index, 'braced', fun(I,D) -> (p_seq([p_string(<<"(">>), p_optional(fun 'space'/2), p_label('e', fun 'expr'/2), p_optional(fun 'space'/2), p_string(<<")">>)]))(I,D) end, fun(Node, _Idx) -> 50 | proplists:get_value(e, Node) 51 | end). 52 | 53 | -spec 'function'(input(), index()) -> parse_result(). 
54 | 'function'(Input, Index) -> 55 | p(Input, Index, 'function', fun(I,D) -> (p_seq([p_label('name', fun 'ident'/2), p_string(<<"(">>), p_optional(fun 'space'/2), p_label('args', p_optional(fun 'function_args'/2)), p_optional(fun 'space'/2), p_string(<<")">>)]))(I,D) end, fun(Node, _Idx) -> 56 | Name = proplists:get_value(name, Node), 57 | Args = proplists:get_value(args, Node), 58 | {function, Name, Args} 59 | end). 60 | 61 | -spec 'function_args'(input(), index()) -> parse_result(). 62 | 'function_args'(Input, Index) -> 63 | p(Input, Index, 'function_args', fun(I,D) -> (p_seq([p_label('head', p_zero_or_more(p_seq([fun 'expr'/2, p_optional(fun 'space'/2), p_string(<<",">>), p_optional(fun 'space'/2)]))), p_label('tail', fun 'expr'/2)]))(I,D) end, fun(Node, _Idx) -> 64 | Head = lists:flatmap(fun([E, _, _, _]) -> E end, proplists:get_value(head, Node)), 65 | Tail = proplists:get_value(tail, Node), 66 | lists:append(Head, Tail) 67 | end). 68 | 69 | -spec 'op'(input(), index()) -> parse_result(). 70 | 'op'(Input, Index) -> 71 | p(Input, Index, 'op', fun(I,D) -> (p_choose([fun 'op_and'/2, fun 'op_or'/2, fun 'op_eq'/2, fun 'op_neq'/2, fun 'op_concat'/2, fun 'op_gte'/2, fun 'op_gt'/2, fun 'op_lte'/2, fun 'op_lt'/2, fun 'op_in'/2, fun 'op_mul'/2, fun 'op_div'/2, fun 'op_add'/2, fun 'op_sub'/2]))(I,D) end, fun(Node, Idx) ->transform('op', Node, Idx) end). 72 | 73 | -spec 'op_mul'(input(), index()) -> parse_result(). 74 | 'op_mul'(Input, Index) -> 75 | p(Input, Index, 'op_mul', fun(I,D) -> (p_choose([p_string(<<"*">>), p_string(<<"times">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '*', left, 8} end). 76 | 77 | -spec 'op_div'(input(), index()) -> parse_result(). 78 | 'op_div'(Input, Index) -> 79 | p(Input, Index, 'op_div', fun(I,D) -> (p_choose([p_string(<<"\/">>), p_string(<<"div">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '/', left, 8} end). 80 | 81 | -spec 'op_add'(input(), index()) -> parse_result(). 
82 | 'op_add'(Input, Index) -> 83 | p(Input, Index, 'op_add', fun(I,D) -> (p_choose([p_string(<<"+">>), p_string(<<"plus">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '+', left, 7} end). 84 | 85 | -spec 'op_sub'(input(), index()) -> parse_result(). 86 | 'op_sub'(Input, Index) -> 87 | p(Input, Index, 'op_sub', fun(I,D) -> (p_choose([p_string(<<"-">>), p_string(<<"minus">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '-', left, 7} end). 88 | 89 | -spec 'op_concat'(input(), index()) -> parse_result(). 90 | 'op_concat'(Input, Index) -> 91 | p(Input, Index, 'op_concat', fun(I,D) -> (p_choose([p_string(<<"<>">>), p_string(<<"concat">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '<>', right, 6} end). 92 | 93 | -spec 'op_in'(input(), index()) -> parse_result(). 94 | 'op_in'(Input, Index) -> 95 | p(Input, Index, 'op_in', fun(I,D) -> (p_string(<<"in">>))(I,D) end, fun(_Node, _Idx) ->{op, in, left, 5} end). 96 | 97 | -spec 'op_gt'(input(), index()) -> parse_result(). 98 | 'op_gt'(Input, Index) -> 99 | p(Input, Index, 'op_gt', fun(I,D) -> (p_choose([p_string(<<">">>), p_string(<<"gt">>), p_string(<<"greater_than">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '>', left, 4} end). 100 | 101 | -spec 'op_gte'(input(), index()) -> parse_result(). 102 | 'op_gte'(Input, Index) -> 103 | p(Input, Index, 'op_gte', fun(I,D) -> (p_choose([p_string(<<">=">>), p_string(<<"gte">>), p_string(<<"greater_than_or_equal">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '>=', left, 4} end). 104 | 105 | -spec 'op_lt'(input(), index()) -> parse_result(). 106 | 'op_lt'(Input, Index) -> 107 | p(Input, Index, 'op_lt', fun(I,D) -> (p_choose([p_string(<<"<">>), p_string(<<"lt">>), p_string(<<"less_than">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '<', left, 4} end). 108 | 109 | -spec 'op_lte'(input(), index()) -> parse_result(). 
110 | 'op_lte'(Input, Index) -> 111 | p(Input, Index, 'op_lte', fun(I,D) -> (p_choose([p_string(<<"<=">>), p_string(<<"lte">>), p_string(<<"less_than_or_equal">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '<=', left, 4} end). 112 | 113 | -spec 'op_eq'(input(), index()) -> parse_result(). 114 | 'op_eq'(Input, Index) -> 115 | p(Input, Index, 'op_eq', fun(I,D) -> (p_choose([p_string(<<"==">>), p_string(<<"eq">>), p_string(<<"equals">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '==', left, 3} end). 116 | 117 | -spec 'op_neq'(input(), index()) -> parse_result(). 118 | 'op_neq'(Input, Index) -> 119 | p(Input, Index, 'op_neq', fun(I,D) -> (p_choose([p_string(<<"!=">>), p_string(<<"not_eq">>), p_string(<<"not_equals">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '!=', left, 3} end). 120 | 121 | -spec 'op_and'(input(), index()) -> parse_result(). 122 | 'op_and'(Input, Index) -> 123 | p(Input, Index, 'op_and', fun(I,D) -> (p_choose([p_string(<<"&&">>), p_string(<<"and">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '&&', left, 2} end). 124 | 125 | -spec 'op_or'(input(), index()) -> parse_result(). 126 | 'op_or'(Input, Index) -> 127 | p(Input, Index, 'op_or', fun(I,D) -> (p_choose([p_string(<<"||">>), p_string(<<"or">>)]))(I,D) end, fun(_Node, _Idx) ->{op, '||', left, 1} end). 128 | 129 | -spec 'path'(input(), index()) -> parse_result(). 130 | 'path'(Input, Index) -> 131 | p(Input, Index, 'path', fun(I,D) -> (p_seq([p_label('head', fun 'ident'/2), p_label('tail', p_zero_or_more(p_seq([p_string(<<".">>), fun 'path_element'/2])))]))(I,D) end, fun(Node, _Idx) -> 132 | Head = proplists:get_value(head, Node), 133 | Tail = lists:map(fun([_, E]) -> E end, proplists:get_value(tail, Node)), 134 | {path, [Head | Tail]} 135 | end). 136 | 137 | -spec 'path_element'(input(), index()) -> parse_result(). 138 | 'path_element'(Input, Index) -> 139 | p(Input, Index, 'path_element', fun(I,D) -> (fun 'ident'/2)(I,D) end, fun(Node, Idx) ->transform('path_element', Node, Idx) end). 
140 | 141 | -spec 'array'(input(), index()) -> parse_result(). 142 | 'array'(Input, Index) -> 143 | p(Input, Index, 'array', fun(I,D) -> (p_seq([p_string(<<"[">>), p_optional(fun 'space'/2), p_label('elements', p_optional(fun 'array_elements'/2)), p_optional(fun 'space'/2), p_string(<<"]">>)]))(I,D) end, fun(Node, _Idx) -> 144 | proplists:get_value(elements, Node) 145 | end). 146 | 147 | -spec 'array_elements'(input(), index()) -> parse_result(). 148 | 'array_elements'(Input, Index) -> 149 | p(Input, Index, 'array_elements', fun(I,D) -> (p_seq([p_label('head', p_zero_or_more(p_seq([fun 'expr'/2, p_optional(fun 'space'/2), p_string(<<",">>), p_optional(fun 'space'/2)]))), p_label('tail', fun 'expr'/2)]))(I,D) end, fun(Node, _Idx) -> 150 | Head = lists:flatmap(fun([E, _, _, _]) -> E end, proplists:get_value(head, Node)), 151 | Tail = proplists:get_value(tail, Node), 152 | {array, lists:append(Head, Tail)} 153 | end). 154 | 155 | -spec 'literal'(input(), index()) -> parse_result(). 156 | 'literal'(Input, Index) -> 157 | p(Input, Index, 'literal', fun(I,D) -> (p_choose([fun 'boolean'/2, fun 'float'/2, fun 'integer'/2, fun 'string'/2]))(I,D) end, fun(Node, Idx) ->transform('literal', Node, Idx) end). 158 | 159 | -spec 'boolean'(input(), index()) -> parse_result(). 160 | 'boolean'(Input, Index) -> 161 | p(Input, Index, 'boolean', fun(I,D) -> (p_choose([fun 'boolean_true'/2, fun 'boolean_false'/2]))(I,D) end, fun(Node, Idx) ->transform('boolean', Node, Idx) end). 162 | 163 | -spec 'boolean_true'(input(), index()) -> parse_result(). 164 | 'boolean_true'(Input, Index) -> 165 | p(Input, Index, 'boolean_true', fun(I,D) -> (p_string(<<"true">>))(I,D) end, fun(_Node, _Idx) ->{boolean, true} end). 166 | 167 | -spec 'boolean_false'(input(), index()) -> parse_result(). 168 | 'boolean_false'(Input, Index) -> 169 | p(Input, Index, 'boolean_false', fun(I,D) -> (p_string(<<"false">>))(I,D) end, fun(_Node, _Idx) ->{boolean, false} end). 
170 | 171 | -spec 'integer'(input(), index()) -> parse_result(). 172 | 'integer'(Input, Index) -> 173 | p(Input, Index, 'integer', fun(I,D) -> (p_seq([p_optional(p_string(<<"-">>)), p_choose([p_string(<<"0">>), p_seq([p_charclass(<<"[1-9]">>), p_zero_or_more(p_charclass(<<"[0-9]">>))])])]))(I,D) end, fun(Node, _Idx) -> 174 | Number = iolist_to_binary(Node), 175 | {integer, binary_to_integer(Number)} 176 | end). 177 | 178 | -spec 'float'(input(), index()) -> parse_result(). 179 | 'float'(Input, Index) -> 180 | p(Input, Index, 'float', fun(I,D) -> (p_seq([p_optional(p_string(<<"-">>)), p_seq([p_one_or_more(p_charclass(<<"[0-9]">>)), p_string(<<".">>), p_one_or_more(p_charclass(<<"[0-9]">>))])]))(I,D) end, fun(Node, _Idx) -> 181 | Number = iolist_to_binary(Node), 182 | {float, binary_to_float(Number)} 183 | end). 184 | 185 | -spec 'string'(input(), index()) -> parse_result(). 186 | 'string'(Input, Index) -> 187 | p(Input, Index, 'string', fun(I,D) -> (p_choose([fun 'string_double'/2, fun 'string_single'/2]))(I,D) end, fun(Node, Idx) ->transform('string', Node, Idx) end). 188 | 189 | -spec 'string_double'(input(), index()) -> parse_result(). 190 | 'string_double'(Input, Index) -> 191 | p(Input, Index, 'string_double', fun(I,D) -> (p_seq([p_string(<<"\"">>), p_label('chars', p_zero_or_more(p_seq([p_not(p_string(<<"\"">>)), p_choose([p_string(<<"\\\\">>), p_string(<<"\\\"">>), p_anything()])]))), p_string(<<"\"">>)]))(I,D) end, fun(Node, _Idx) ->{string, iolist_to_binary(proplists:get_value(chars, Node))} end). 192 | 193 | -spec 'string_single'(input(), index()) -> parse_result(). 194 | 'string_single'(Input, Index) -> 195 | p(Input, Index, 'string_single', fun(I,D) -> (p_seq([p_string(<<"\'">>), p_label('chars', p_zero_or_more(p_seq([p_not(p_string(<<"\'">>)), p_choose([p_string(<<"\\\\">>), p_string(<<"\\\'">>), p_anything()])]))), p_string(<<"\'">>)]))(I,D) end, fun(Node, _Idx) ->{string, iolist_to_binary(proplists:get_value(chars, Node))} end). 
196 | 197 | -spec 'ident'(input(), index()) -> parse_result(). 198 | 'ident'(Input, Index) -> 199 | p(Input, Index, 'ident', fun(I,D) -> (p_seq([p_charclass(<<"[a-zA-Z_]">>), p_zero_or_more(p_charclass(<<"[a-zA-Z0-9_]">>))]))(I,D) end, fun(Node, _Idx) ->{ident, iolist_to_binary(Node)} end). 200 | 201 | -spec 'space'(input(), index()) -> parse_result(). 202 | 'space'(Input, Index) -> 203 | p(Input, Index, 'space', fun(I,D) -> (p_zero_or_more(p_charclass(<<"[\s\t\n\s\r]">>)))(I,D) end, fun(Node, _Idx) ->Node end). 204 | 205 | 206 | transform(_,Node,_Index) -> Node. 207 | -file("peg_includes.hrl", 1). 208 | -type index() :: {{line, pos_integer()}, {column, pos_integer()}}. 209 | -type input() :: binary(). 210 | -type parse_failure() :: {fail, term()}. 211 | -type parse_success() :: {term(), input(), index()}. 212 | -type parse_result() :: parse_failure() | parse_success(). 213 | -type parse_fun() :: fun((input(), index()) -> parse_result()). 214 | -type xform_fun() :: fun((input(), index()) -> term()). 215 | 216 | -spec p(input(), index(), atom(), parse_fun(), xform_fun()) -> parse_result(). 217 | p(Inp, StartIndex, Name, ParseFun, TransformFun) -> 218 | case get_memo(StartIndex, Name) of % See if the current reduction is memoized 219 | {ok, Memo} -> %Memo; % If it is, return the stored result 220 | Memo; 221 | _ -> % If not, attempt to parse 222 | Result = case ParseFun(Inp, StartIndex) of 223 | {fail,_} = Failure -> % If it fails, memoize the failure 224 | Failure; 225 | {Match, InpRem, NewIndex} -> % If it passes, transform and memoize the result. 226 | Transformed = TransformFun(Match, StartIndex), 227 | {Transformed, InpRem, NewIndex} 228 | end, 229 | memoize(StartIndex, Name, Result), 230 | Result 231 | end. 232 | 233 | -spec setup_memo() -> ets:tid(). 234 | setup_memo() -> 235 | put({parse_memo_table, ?MODULE}, ets:new(?MODULE, [set])). 236 | 237 | -spec release_memo() -> true. 238 | release_memo() -> 239 | ets:delete(memo_table_name()). 
240 | 241 | -spec memoize(index(), atom(), parse_result()) -> true. 242 | memoize(Index, Name, Result) -> 243 | Memo = case ets:lookup(memo_table_name(), Index) of 244 | [] -> []; 245 | [{Index, Plist}] -> Plist 246 | end, 247 | ets:insert(memo_table_name(), {Index, [{Name, Result}|Memo]}). 248 | 249 | -spec get_memo(index(), atom()) -> {ok, term()} | {error, not_found}. 250 | get_memo(Index, Name) -> 251 | case ets:lookup(memo_table_name(), Index) of 252 | [] -> {error, not_found}; 253 | [{Index, Plist}] -> 254 | case proplists:lookup(Name, Plist) of 255 | {Name, Result} -> {ok, Result}; 256 | _ -> {error, not_found} 257 | end 258 | end. 259 | 260 | -spec memo_table_name() -> ets:tid(). 261 | memo_table_name() -> 262 | get({parse_memo_table, ?MODULE}). 263 | 264 | -ifdef(p_eof). 265 | -spec p_eof() -> parse_fun(). 266 | p_eof() -> 267 | fun(<<>>, Index) -> {eof, [], Index}; 268 | (_, Index) -> {fail, {expected, eof, Index}} end. 269 | -endif. 270 | 271 | -ifdef(p_optional). 272 | -spec p_optional(parse_fun()) -> parse_fun(). 273 | p_optional(P) -> 274 | fun(Input, Index) -> 275 | case P(Input, Index) of 276 | {fail,_} -> {[], Input, Index}; 277 | {_, _, _} = Success -> Success 278 | end 279 | end. 280 | -endif. 281 | 282 | -ifdef(p_not). 283 | -spec p_not(parse_fun()) -> parse_fun(). 284 | p_not(P) -> 285 | fun(Input, Index)-> 286 | case P(Input,Index) of 287 | {fail,_} -> 288 | {[], Input, Index}; 289 | {Result, _, _} -> {fail, {expected, {no_match, Result},Index}} 290 | end 291 | end. 292 | -endif. 293 | 294 | -ifdef(p_assert). 295 | -spec p_assert(parse_fun()) -> parse_fun(). 296 | p_assert(P) -> 297 | fun(Input,Index) -> 298 | case P(Input,Index) of 299 | {fail,_} = Failure-> Failure; 300 | _ -> {[], Input, Index} 301 | end 302 | end. 303 | -endif. 304 | 305 | -ifdef(p_seq). 306 | -spec p_seq([parse_fun()]) -> parse_fun(). 307 | p_seq(P) -> 308 | fun(Input, Index) -> 309 | p_all(P, Input, Index, []) 310 | end. 
311 | 312 | -spec p_all([parse_fun()], input(), index(), [term()]) -> parse_result(). 313 | p_all([], Inp, Index, Accum ) -> {lists:reverse( Accum ), Inp, Index}; 314 | p_all([P|Parsers], Inp, Index, Accum) -> 315 | case P(Inp, Index) of 316 | {fail, _} = Failure -> Failure; 317 | {Result, InpRem, NewIndex} -> p_all(Parsers, InpRem, NewIndex, [Result|Accum]) 318 | end. 319 | -endif. 320 | 321 | -ifdef(p_choose). 322 | -spec p_choose([parse_fun()]) -> parse_fun(). 323 | p_choose(Parsers) -> 324 | fun(Input, Index) -> 325 | p_attempt(Parsers, Input, Index, none) 326 | end. 327 | 328 | -spec p_attempt([parse_fun()], input(), index(), none | parse_failure()) -> parse_result(). 329 | p_attempt([], _Input, _Index, Failure) -> Failure; 330 | p_attempt([P|Parsers], Input, Index, FirstFailure)-> 331 | case P(Input, Index) of 332 | {fail, _} = Failure -> 333 | case FirstFailure of 334 | none -> p_attempt(Parsers, Input, Index, Failure); 335 | _ -> p_attempt(Parsers, Input, Index, FirstFailure) 336 | end; 337 | Result -> Result 338 | end. 339 | -endif. 340 | 341 | -ifdef(p_zero_or_more). 342 | -spec p_zero_or_more(parse_fun()) -> parse_fun(). 343 | p_zero_or_more(P) -> 344 | fun(Input, Index) -> 345 | p_scan(P, Input, Index, []) 346 | end. 347 | -endif. 348 | 349 | -ifdef(p_one_or_more). 350 | -spec p_one_or_more(parse_fun()) -> parse_fun(). 351 | p_one_or_more(P) -> 352 | fun(Input, Index)-> 353 | Result = p_scan(P, Input, Index, []), 354 | case Result of 355 | {[_|_], _, _} -> 356 | Result; 357 | _ -> 358 | {fail, {expected, Failure, _}} = P(Input,Index), 359 | {fail, {expected, {at_least_one, Failure}, Index}} 360 | end 361 | end. 362 | -endif. 363 | 364 | -ifdef(p_label). 365 | -spec p_label(atom(), parse_fun()) -> parse_fun(). 366 | p_label(Tag, P) -> 367 | fun(Input, Index) -> 368 | case P(Input, Index) of 369 | {fail,_} = Failure -> 370 | Failure; 371 | {Result, InpRem, NewIndex} -> 372 | {{Tag, Result}, InpRem, NewIndex} 373 | end 374 | end. 375 | -endif. 
376 | 377 | -ifdef(p_scan). 378 | -spec p_scan(parse_fun(), input(), index(), [term()]) -> {[term()], input(), index()}. 379 | p_scan(_, <<>>, Index, Accum) -> {lists:reverse(Accum), <<>>, Index}; 380 | p_scan(P, Inp, Index, Accum) -> 381 | case P(Inp, Index) of 382 | {fail,_} -> {lists:reverse(Accum), Inp, Index}; 383 | {Result, InpRem, NewIndex} -> p_scan(P, InpRem, NewIndex, [Result | Accum]) 384 | end. 385 | -endif. 386 | 387 | -ifdef(p_string). 388 | -spec p_string(binary()) -> parse_fun(). 389 | p_string(S) -> 390 | Length = erlang:byte_size(S), 391 | fun(Input, Index) -> 392 | try 393 | <> = Input, 394 | {S, Rest, p_advance_index(S, Index)} 395 | catch 396 | error:{badmatch,_} -> {fail, {expected, {string, S}, Index}} 397 | end 398 | end. 399 | -endif. 400 | 401 | -ifdef(p_anything). 402 | -spec p_anything() -> parse_fun(). 403 | p_anything() -> 404 | fun(<<>>, Index) -> {fail, {expected, any_character, Index}}; 405 | (Input, Index) when is_binary(Input) -> 406 | <> = Input, 407 | {<>, Rest, p_advance_index(<>, Index)} 408 | end. 409 | -endif. 410 | 411 | -ifdef(p_charclass). 412 | -spec p_charclass(string() | binary()) -> parse_fun(). 413 | p_charclass(Class) -> 414 | {ok, RE} = re:compile(Class, [unicode, dotall]), 415 | fun(Inp, Index) -> 416 | case re:run(Inp, RE, [anchored]) of 417 | {match, [{0, Length}|_]} -> 418 | {Head, Tail} = erlang:split_binary(Inp, Length), 419 | {Head, Tail, p_advance_index(Head, Index)}; 420 | _ -> {fail, {expected, {character_class, binary_to_list(Class)}, Index}} 421 | end 422 | end. 423 | -endif. 424 | 425 | -ifdef(p_regexp). 426 | -spec p_regexp(binary()) -> parse_fun(). 
427 | p_regexp(Regexp) -> 428 | {ok, RE} = re:compile(Regexp, [unicode, dotall, anchored]), 429 | fun(Inp, Index) -> 430 | case re:run(Inp, RE) of 431 | {match, [{0, Length}|_]} -> 432 | {Head, Tail} = erlang:split_binary(Inp, Length), 433 | {Head, Tail, p_advance_index(Head, Index)}; 434 | _ -> {fail, {expected, {regexp, binary_to_list(Regexp)}, Index}} 435 | end 436 | end. 437 | -endif. 438 | 439 | -ifdef(line). 440 | -spec line(index() | term()) -> pos_integer() | undefined. 441 | line({{line,L},_}) -> L; 442 | line(_) -> undefined. 443 | -endif. 444 | 445 | -ifdef(column). 446 | -spec column(index() | term()) -> pos_integer() | undefined. 447 | column({_,{column,C}}) -> C; 448 | column(_) -> undefined. 449 | -endif. 450 | 451 | -spec p_advance_index(input() | unicode:charlist() | pos_integer(), index()) -> index(). 452 | p_advance_index(MatchedInput, Index) when is_list(MatchedInput) orelse is_binary(MatchedInput)-> % strings 453 | lists:foldl(fun p_advance_index/2, Index, unicode:characters_to_list(MatchedInput)); 454 | p_advance_index(MatchedInput, Index) when is_integer(MatchedInput) -> % single characters 455 | {{line, Line}, {column, Col}} = Index, 456 | case MatchedInput of 457 | $\n -> {{line, Line+1}, {column, 1}}; 458 | _ -> {{line, Line}, {column, Col+1}} 459 | end. 460 | --------------------------------------------------------------------------------