├── .check.exs ├── .credo.exs ├── .formatter.exs ├── .github ├── dependabot.yml └── workflows │ └── elixir.yml ├── .gitignore ├── .tool-versions ├── CHANGELOG.md ├── LICENSE ├── README.md ├── benchmarks └── options.exs ├── config └── config.exs ├── documentation ├── .gitkeep ├── how_to │ ├── setup-autocomplete.md │ ├── split-up-large-dsls.md │ ├── upgrade-to-2.0.md │ └── writing-extensions.md └── tutorials │ └── get-started-with-spark.md ├── lib ├── mix │ ├── helpers.ex │ └── tasks │ │ ├── spark.cheat_sheets.ex │ │ ├── spark.cheat_sheets_in_search.ex │ │ ├── spark.formatter.ex │ │ ├── spark.install.ex │ │ └── spark.replace_doc_links.ex ├── spark.ex └── spark │ ├── cheat_sheet.ex │ ├── code_helpers.ex │ ├── docs.ex │ ├── dsl.ex │ ├── dsl │ ├── builder.ex │ ├── entity.ex │ ├── extension.ex │ ├── extension │ │ ├── entity.ex │ │ ├── entity_option.ex │ │ ├── imports.ex │ │ └── section_option.ex │ ├── fragment.ex │ ├── patch │ │ └── add_entity.ex │ ├── section.ex │ ├── transformer.ex │ ├── verifier.ex │ └── verifiers │ │ └── verify_entity_uniqueness.ex │ ├── elixir_sense │ ├── aliases.ex │ ├── entity.ex │ └── plugin.ex │ ├── error │ └── dsl_error.ex │ ├── formatter.ex │ ├── igniter.ex │ ├── info_generator.ex │ ├── options │ ├── docs.ex │ ├── helpers.ex │ ├── options.ex │ ├── validation_error.ex │ └── validator.ex │ └── options_helpers.ex ├── mix.exs ├── mix.lock ├── test ├── add_extension_test.exs ├── code_helpers_test.exs ├── cross_extension_recursive_patch_test.exs ├── dsl_test.exs ├── elixir_sense │ └── plugin_test.exs ├── formatter_test.exs ├── igniter_test.exs ├── options │ ├── impl_validator_test.exs │ ├── mixed_list_test.exs │ └── validator_test.exs ├── recursive_test.exs ├── spark_test.exs ├── support │ ├── contact │ │ ├── contact.ex │ │ ├── contact_patcher.ex │ │ ├── contacter.ex │ │ ├── fragment.ex │ │ ├── info.ex │ │ ├── ted_dansen.ex │ │ └── verifiers │ │ │ └── verify_not_gandalf.ex │ ├── example_contacter.ex │ ├── example_options.ex │ ├── info │ │ ├── my_extension.ex │ │ └── my_extension_info.ex │ ├── recursive │ │ ├── atom.ex │ │ ├── info.ex │ │ ├── recursive.ex │ │ └── step.ex │ └── top_level │ │ ├── info.ex │ │ └── top_level.ex ├── test_helper.exs ├── top_level_test.exs └── transformer_test.exs └── usage-rules.md /.check.exs: -------------------------------------------------------------------------------- 1 | [ 2 | ## all available options with default values (see `mix check` docs for description) 3 | # parallel: true, 4 | # skipped: true, 5 | 6 | ## list of tools (see `mix check` docs for defaults) 7 | tools: [ 8 | ## curated tools may be disabled (e.g. the check for compilation warnings) 9 | # {:compiler, false}, 10 | 11 | ## ...or adjusted (e.g. use one-line formatter for more compact credo output) 12 | # {:credo, "mix credo --format oneline"}, 13 | ## custom new tools may be added (mix tasks or arbitrary commands) 14 | # {:my_mix_task, command: "mix release", env: %{"MIX_ENV" => "prod"}}, 15 | # {:my_arbitrary_tool, command: "npm test", cd: "assets"}, 16 | # {:my_arbitrary_script, command: ["my_script", "argument with spaces"], cd: "scripts"} 17 | ] 18 | ] 19 | -------------------------------------------------------------------------------- /.credo.exs: -------------------------------------------------------------------------------- 1 | # This file contains the configuration for Credo and you are probably reading 2 | # this after creating it with `mix credo.gen.config`. 
3 | # 4 | # If you find anything wrong or unclear in this file, please report an 5 | # issue on GitHub: https://github.com/rrrene/credo/issues 6 | # 7 | %{ 8 | # 9 | # You can have as many configs as you like in the `configs:` field. 10 | configs: [ 11 | %{ 12 | # 13 | # Run any config using `mix credo -C <name>`. If no config name is given 14 | # "default" is used. 15 | # 16 | name: "default", 17 | # 18 | # These are the files included in the analysis: 19 | files: %{ 20 | # 21 | # You can give explicit globs or simply directories. 22 | # In the latter case `**/*.{ex,exs}` will be used. 23 | # 24 | included: [ 25 | "lib/", 26 | "src/", 27 | "test/", 28 | "web/", 29 | "apps/*/lib/", 30 | "apps/*/src/", 31 | "apps/*/test/", 32 | "apps/*/web/" 33 | ], 34 | excluded: [~r"/_build/", ~r"/deps/", ~r"/node_modules/"] 35 | }, 36 | # 37 | # Load and configure plugins here: 38 | # 39 | plugins: [], 40 | # 41 | # If you create your own checks, you must specify the source files for 42 | # them here, so they can be loaded by Credo before running the analysis. 43 | # 44 | requires: [], 45 | # 46 | # If you want to enforce a style guide and need a more traditional linting 47 | # experience, you can change `strict` to `true` below: 48 | # 49 | strict: false, 50 | # 51 | # To modify the timeout for parsing files, change this value: 52 | # 53 | parse_timeout: 5000, 54 | # 55 | # If you want to use uncolored output by default, you can change `color` 56 | # to `false` below: 57 | # 58 | color: true, 59 | # 60 | # You can customize the parameters of any check by adding a second element 61 | # to the tuple. 62 | # 63 | # To disable a check put `false` as second element: 64 | # 65 | # {Credo.Check.Design.DuplicatedCode, false} 66 | # 67 | checks: %{ 68 | enabled: [ 69 | # 70 | ## Consistency Checks 71 | # 72 | {Credo.Check.Consistency.ExceptionNames, []}, 73 | {Credo.Check.Consistency.LineEndings, []}, 74 | {Credo.Check.Consistency.ParameterPatternMatching, []}, 75 | {Credo.Check.Consistency.SpaceAroundOperators, []}, 76 | {Credo.Check.Consistency.SpaceInParentheses, []}, 77 | {Credo.Check.Consistency.TabsOrSpaces, []}, 78 | 79 | # 80 | ## Design Checks 81 | # 82 | {Credo.Check.Design.AliasUsage, false}, 83 | {Credo.Check.Design.TagTODO, false}, 84 | {Credo.Check.Design.TagFIXME, []}, 85 | 86 | # 87 | ## Readability Checks 88 | # 89 | {Credo.Check.Readability.AliasOrder, []}, 90 | {Credo.Check.Readability.FunctionNames, []}, 91 | {Credo.Check.Readability.LargeNumbers, []}, 92 | {Credo.Check.Readability.MaxLineLength, false}, 93 | {Credo.Check.Readability.ModuleAttributeNames, []}, 94 | {Credo.Check.Readability.ModuleDoc, []}, 95 | {Credo.Check.Readability.ModuleNames, []}, 96 | {Credo.Check.Readability.ParenthesesInCondition, []}, 97 | {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []}, 98 | {Credo.Check.Readability.PipeIntoAnonymousFunctions, []}, 99 | {Credo.Check.Readability.PredicateFunctionNames, []}, 100 | {Credo.Check.Readability.PreferImplicitTry, []}, 101 | {Credo.Check.Readability.RedundantBlankLines, []}, 102 | {Credo.Check.Readability.Semicolons, []}, 103 | {Credo.Check.Readability.SpaceAfterCommas, []}, 104 | {Credo.Check.Readability.StringSigils, []}, 105 | {Credo.Check.Readability.TrailingBlankLine, []}, 106 | {Credo.Check.Readability.TrailingWhiteSpace, []}, 107 | {Credo.Check.Readability.UnnecessaryAliasExpansion, []}, 108 | {Credo.Check.Readability.VariableNames, []}, 109 | {Credo.Check.Readability.WithSingleClause, []}, 110 | 111 | # 112 | ## Refactoring Opportunities 113 | # 114 |
{Credo.Check.Refactor.Apply, false}, 115 | {Credo.Check.Refactor.CondStatements, []}, 116 | {Credo.Check.Refactor.CyclomaticComplexity, false}, 117 | {Credo.Check.Refactor.FunctionArity, false}, 118 | {Credo.Check.Refactor.LongQuoteBlocks, false}, 119 | {Credo.Check.Refactor.MatchInCondition, []}, 120 | {Credo.Check.Refactor.MapJoin, []}, 121 | {Credo.Check.Refactor.NegatedConditionsInUnless, []}, 122 | {Credo.Check.Refactor.NegatedConditionsWithElse, []}, 123 | {Credo.Check.Refactor.Nesting, [max_nesting: 7]}, 124 | {Credo.Check.Refactor.UnlessWithElse, []}, 125 | {Credo.Check.Refactor.WithClauses, []}, 126 | {Credo.Check.Refactor.FilterFilter, []}, 127 | {Credo.Check.Refactor.RejectReject, []}, 128 | {Credo.Check.Refactor.RedundantWithClauseResult, []}, 129 | 130 | # 131 | ## Warnings 132 | # 133 | {Credo.Check.Warning.ApplicationConfigInModuleAttribute, []}, 134 | {Credo.Check.Warning.BoolOperationOnSameValues, []}, 135 | {Credo.Check.Warning.ExpensiveEmptyEnumCheck, []}, 136 | {Credo.Check.Warning.IExPry, []}, 137 | {Credo.Check.Warning.IoInspect, []}, 138 | {Credo.Check.Warning.OperationOnSameValues, []}, 139 | {Credo.Check.Warning.OperationWithConstantResult, []}, 140 | {Credo.Check.Warning.RaiseInsideRescue, []}, 141 | {Credo.Check.Warning.SpecWithStruct, []}, 142 | {Credo.Check.Warning.WrongTestFileExtension, []}, 143 | {Credo.Check.Warning.UnusedEnumOperation, []}, 144 | {Credo.Check.Warning.UnusedFileOperation, []}, 145 | {Credo.Check.Warning.UnusedKeywordOperation, []}, 146 | {Credo.Check.Warning.UnusedListOperation, []}, 147 | {Credo.Check.Warning.UnusedPathOperation, []}, 148 | {Credo.Check.Warning.UnusedRegexOperation, []}, 149 | {Credo.Check.Warning.UnusedStringOperation, []}, 150 | {Credo.Check.Warning.UnusedTupleOperation, []}, 151 | {Credo.Check.Warning.UnsafeExec, []} 152 | ], 153 | disabled: [ 154 | # 155 | # Checks scheduled for next check update (opt-in for now, just replace `false` with `[]`) 156 | 157 | # 158 | # Controversial and experimental checks (opt-in, just move the check to `:enabled` 159 | # and be sure to use `mix credo --strict` to see low priority checks) 160 | # 161 | {Credo.Check.Consistency.MultiAliasImportRequireUse, []}, 162 | {Credo.Check.Consistency.UnusedVariableNames, []}, 163 | {Credo.Check.Design.DuplicatedCode, []}, 164 | {Credo.Check.Design.SkipTestWithoutComment, []}, 165 | {Credo.Check.Readability.AliasAs, []}, 166 | {Credo.Check.Readability.BlockPipe, []}, 167 | {Credo.Check.Readability.ImplTrue, []}, 168 | {Credo.Check.Readability.MultiAlias, []}, 169 | {Credo.Check.Readability.NestedFunctionCalls, []}, 170 | {Credo.Check.Readability.SeparateAliasRequire, []}, 171 | {Credo.Check.Readability.SingleFunctionToBlockPipe, []}, 172 | {Credo.Check.Readability.SinglePipe, []}, 173 | {Credo.Check.Readability.Specs, []}, 174 | {Credo.Check.Readability.StrictModuleLayout, []}, 175 | {Credo.Check.Readability.WithCustomTaggedTuple, []}, 176 | {Credo.Check.Refactor.ABCSize, []}, 177 | {Credo.Check.Refactor.AppendSingleItem, []}, 178 | {Credo.Check.Refactor.DoubleBooleanNegation, []}, 179 | {Credo.Check.Refactor.FilterReject, []}, 180 | {Credo.Check.Refactor.IoPuts, []}, 181 | {Credo.Check.Refactor.MapMap, []}, 182 | {Credo.Check.Refactor.ModuleDependencies, []}, 183 | {Credo.Check.Refactor.NegatedIsNil, []}, 184 | {Credo.Check.Refactor.PipeChainStart, []}, 185 | {Credo.Check.Refactor.RejectFilter, []}, 186 | {Credo.Check.Refactor.VariableRebinding, []}, 187 | {Credo.Check.Warning.LazyLogging, []}, 188 | {Credo.Check.Warning.LeakyEnvironment, []}, 189 
| {Credo.Check.Warning.MapGetUnsafePass, []}, 190 | {Credo.Check.Warning.MixEnv, []}, 191 | {Credo.Check.Warning.UnsafeToAtom, []} 192 | 193 | # {Credo.Check.Refactor.MapInto, []}, 194 | 195 | # 196 | # Custom checks can be created using `mix credo.gen.check`. 197 | # 198 | ] 199 | } 200 | } 201 | ] 202 | } 203 | -------------------------------------------------------------------------------- /.formatter.exs: -------------------------------------------------------------------------------- 1 | # Used by "mix format" 2 | [ 3 | plugins: [Spark.Formatter], 4 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] 5 | ] 6 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: mix 4 | versioning-strategy: lockfile-only 5 | directory: "/" 6 | schedule: 7 | interval: "daily" 8 | -------------------------------------------------------------------------------- /.github/workflows/elixir.yml: -------------------------------------------------------------------------------- 1 | name: Spark CI 2 | 3 | on: 4 | push: 5 | tags: 6 | - "v*" 7 | branches: [main] 8 | pull_request: 9 | branches: [main] 10 | jobs: 11 | ash-ci: 12 | uses: ash-project/ash/.github/workflows/ash-ci.yml@main 13 | secrets: 14 | HEX_API_KEY: ${{ secrets.HEX_API_KEY }} 15 | with: 16 | spark-formatter: false 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directory Mix downloads your dependencies sources to. 8 | /deps/ 9 | 10 | # Where third-party dependencies like ExDoc output generated docs. 11 | /doc/ 12 | 13 | # Ignore .fetch files in case you like to edit your project deps locally. 14 | /.fetch 15 | 16 | # If the VM crashes, it generates a dump, let's ignore it too. 17 | erl_crash.dump 18 | 19 | # Also ignore archive artifacts (built via "mix archive.build"). 20 | *.ez 21 | 22 | # Ignore package tarball (built via "mix hex.build"). 23 | spark-*.tar 24 | 25 | # Temporary files, for example, from tests. 26 | /tmp/ 27 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | erlang 27.0.1 2 | elixir 1.18.4 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Zachary Scott Daniel 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Spark 2 | 3 | Spark helps you build powerful and well documented DSLs that come with useful tooling out of the box. DSLs are declared using simple structs, and every DSL has the ability to be extended by the end user. Spark powers all of the DSLs in Ash Framework. 4 | 5 | See the [getting started guide](https://ash-hq.org/docs/guides/spark/latest/get-started-with-spark) 6 | -------------------------------------------------------------------------------- /benchmarks/options.exs: -------------------------------------------------------------------------------- 1 | schema = [ 2 | foo: [ 3 | type: :string, 4 | required: true 5 | ], 6 | bar: [ 7 | type: :string, 8 | default: "default" 9 | ], 10 | baz: [ 11 | type: :integer, 12 | default: 10 13 | ] 14 | ] 15 | 16 | new_schema = Spark.Options.new!(schema) 17 | 18 | defmodule MySchema do 19 | use Spark.Options.Validator, schema: schema 20 | end 21 | 22 | # prime 23 | Spark.Options.validate!([foo: "foo", bar: "bar", baz: 20], schema) 24 | Spark.Options.validate!([foo: "foo", bar: "bar", baz: 20], new_schema) 25 | MySchema.validate!([foo: "foo", bar: "bar", baz: 20]) 26 | 27 | Benchee.run( 28 | %{ 29 | "existing" => fn -> 30 | options = Spark.Options.validate!([foo: "foo", bar: "bar", baz: 20], schema) 31 | _foo = options[:foo] 32 | _bar = options[:bar] 33 | _foo = options[:foo] 34 | _bar = options[:bar] 35 | _foo = options[:foo] 36 | _bar = options[:bar] 37 | end, 38 | "existing with built schema" => fn -> 39 | options = Spark.Options.validate!([foo: "foo", bar: "bar", baz: 20], new_schema) 40 | 41 | _foo = options[:foo] 42 | _bar = options[:bar] 43 | _foo = options[:foo] 44 | _bar = options[:bar] 45 | _foo = options[:foo] 46 | _bar = options[:bar] 47 | end, 48 | "validator" => fn -> 49 | options = MySchema.validate!([foo: "foo", bar: "bar", baz: 20]) 50 | _foo = options.foo 51 | _bar = options.bar 52 | _foo = options.foo 53 | _bar = options.bar 54 | _foo = options.foo 55 | _bar = options.bar 56 | end, 57 | "validator to options" => fn -> 58 | options = MySchema.validate!([foo: "foo", bar: "bar", baz: 20]) |> MySchema.to_options() 59 | 60 | _foo = options[:foo] 61 | _bar = options[:bar] 62 | _foo = options[:foo] 63 | _bar = options[:bar] 64 | _foo = options[:foo] 65 | _bar = options[:bar] 66 | _foo = options[:foo] 67 | _bar = options[:bar] 68 | end 69 | }, 70 | memory_time: 2 71 | ) 72 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | config :spark, :enforce_spark_elixir_sense_behaviours?, true 4 | 5 | if Mix.env() == :dev do 6 | config :git_ops, 7 | mix_project: Spark.MixProject, 8 | changelog_file: "CHANGELOG.md", 9 | repository_url: "https://github.com/ash-project/spark", 10 | # Instructs the tool to manage your mix 
version in your `mix.exs` file 11 | # See below for more information 12 | manage_mix_version?: true, 13 | # Instructs the tool to manage the version in your README.md 14 | # Pass in `true` to use `"README.md"` or a string to customize 15 | manage_readme_version: "documentation/tutorials/get-started-with-spark.md", 16 | version_tag_prefix: "v" 17 | end 18 | -------------------------------------------------------------------------------- /documentation/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ash-project/spark/b0dc2dedb4127a7a0cd5e49cbc894e7a1a610294/documentation/.gitkeep -------------------------------------------------------------------------------- /documentation/how_to/setup-autocomplete.md: -------------------------------------------------------------------------------- 1 | # Setting up autocomplete 2 | 3 | ## Compatibility 4 | 5 | Autocomplete is enhanced by a plugin to ElixirSense, and therefore it only works for those who are using [ElixirLS](https://github.com/elixir-lsp/elixir-ls). We may consider adding the same extension to other language servers in the future. 6 | 7 | ## Setting it up 8 | 9 | ### DSL Modules 10 | 11 | Inside of DSL modules, there is nothing you need to do! Autocomplete "just works" because ElixirSense finds the extension present inside of the Spark dependency. 12 | 13 | ### Options to functions using `Spark.Options` 14 | 15 | To get autocomplete with documentation for the options to your functions, you need to add `@doc` metadata that contains the index of the argument that the schema applies to, and the schema itself. Here is a complete example: 16 | 17 | ```elixir 18 | @schema [ 19 | verbose?: [ 20 | type: :boolean, 21 | doc: "Whether or not to log verbose messages to the console", 22 | default: false 23 | ] 24 | ] 25 | 26 | @doc spark_opts: [{1, @schema}] 27 | def do_something(arg, opts \\ []) do 28 | opts = Spark.Options.validate!(opts, @schema) 29 | 30 | ... 31 | end 32 | ``` 33 | -------------------------------------------------------------------------------- /documentation/how_to/split-up-large-dsls.md: -------------------------------------------------------------------------------- 1 | # Splitting Up Large DSLs 2 | 3 | When building large DSLs, we face problems similar to those of large configuration files. It can be hard to find what we're looking for, and we can end up scrolling through a lot of DSL code to find what we're interested in. We generally suggest avoiding splitting up your DSLs by default, but it is important to know how to do so when the need arises. 4 | 5 | ## Fragments 6 | 7 | Spark offers a tool called `Spark.Dsl.Fragment`, which allows you to compose a single DSL from multiple smaller DSL modules. There are a few important properties and caveats to understand: 8 | 9 | 1. Fragments are _not_ designed for sharing code between instances of a spark DSL. They are not dynamic. For creating behavior that extends across multiple instances of a DSL, you should write an extension. 10 | 11 | 2. A DSL has all extensions that any of its fragments has. 12 | 13 | 3. Fragments must express what they are a fragment _of_. 14 | 15 | ### Example 16 | 17 | ```elixir 18 | defmodule MyApp.Accounts.User.Fragments.DataLayer do 19 | use Spark.Dsl.Fragment, 20 | of: Ash.Resource, 21 | data_layer: AshPostgres.DataLayer 22 | 23 | postgres do 24 | table "users" 25 | repo MyApp.Repo 26 | ...
27 | end 28 | end 29 | 30 | defmodule MyApp.Accounts.User do 31 | use Ash.Resource, 32 | fragments: [MyApp.Accounts.User.Fragments.DataLayer] 33 | 34 | ... 35 | end 36 | ``` 37 | -------------------------------------------------------------------------------- /documentation/how_to/upgrade-to-2.0.md: -------------------------------------------------------------------------------- 1 | # Upgrading to 2.0 2 | 3 | A 2.0 release was published with a minor breaking change. We decided to vendor `NimbleOptions` (copy their code into our codebase) so that we could make some necessary modifications to it. What this means for users is primarily that: 4 | 5 | 1. we no longer depend on `NimbleOptions` 6 | 2. if you are matching on `NimbleOptions.ValidationError` you will need to update your code to match on `Spark.Options.ValidationError` -------------------------------------------------------------------------------- /documentation/how_to/writing-extensions.md: -------------------------------------------------------------------------------- 1 | # Writing Extensions 2 | 3 | Writing extensions generally involves three main components. 4 | 5 | ## The DSL declaration 6 | 7 | The DSL is declared as a series of `Spark.Dsl.Section`, which can contain `Spark.Dsl.Entity` and further `Spark.Dsl.Section` structs. See `Spark.Dsl.Section` and `Spark.Dsl.Entity` for more information. 8 | 9 | ## Transformers 10 | 11 | Extension writing gets a bit more complicated when you get into the world of transformers, but this is also where a lot of the power is. Each transformer can declare other transformers it must go before or after, and then is given the opportunity to modify the entirety of the DSL it is extending up to that point. This allows extensions to make rich modifications to the structure in question. See `Spark.Dsl.Transformer` for more information. 12 | 13 | ## Introspection 14 | 15 | Use functions in `Spark.Dsl.Extension` to retrieve the stored values from the DSL and expose them in a module. The convention is to place functions for something like `MyApp.MyExtension` in `MyApp.MyExtension.Info`. Using introspection functions like this allows for a richer introspection API (i.e. not just getting and retrieving raw values), and it also allows us to add type specs and documentation, which is helpful when working generically. For example, `module_as_variable.table()` can't be known by dialyzer, whereas `Extension.table(module)` can be. -------------------------------------------------------------------------------- /lib/mix/helpers.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Mix.Helpers do 2 | @moduledoc false 3 | 4 | def extension_name(extension, opts) do 5 | extension 6 | |> inspect() 7 | |> strip_prefix(opts) 8 | |> String.trim_trailing(".Dsl") 9 | |> String.split(".") 10 | |> Enum.join(".") 11 | end 12 | 13 | defp strip_prefix(extension, opts) do 14 | if opts[:strip_prefix] do 15 | String.trim_leading(extension, opts[:strip_prefix]) 16 | else 17 | extension 18 | end 19 | end 20 | end 21 | -------------------------------------------------------------------------------- /lib/mix/tasks/spark.cheat_sheets.ex: -------------------------------------------------------------------------------- 1 | defmodule Mix.Tasks.Spark.CheatSheets.Docs do 2 | @moduledoc false 3 | 4 | def short_doc do 5 | "Creates cheat sheets for each Extension provided. Useful for CI with the `--check` flag."
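# The generated cheat sheets are written to
# documentation/dsls/DSL-<ExtensionName>.md, where <ExtensionName> comes from
# Spark.Mix.Helpers.extension_name/2 (see the `igniter/1` implementation below).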
6 | end 7 | 8 | def example do 9 | "mix spark.cheat_sheets --extensions MyApp.Foo,MyApp.Bar" 10 | end 11 | 12 | def long_doc do 13 | """ 14 | #{short_doc()} 15 | 16 | ## Example 17 | 18 | ```bash 19 | #{example()} 20 | ``` 21 | 22 | ## Options 23 | 24 | * `--extensions` - The list of extensions to generate cheat sheets for 25 | """ 26 | end 27 | end 28 | 29 | if Code.ensure_loaded?(Igniter) do 30 | defmodule Mix.Tasks.Spark.CheatSheets do 31 | @shortdoc "#{__MODULE__.Docs.short_doc()}" 32 | 33 | @moduledoc __MODULE__.Docs.long_doc() 34 | 35 | use Igniter.Mix.Task 36 | 37 | @impl Igniter.Mix.Task 38 | def info(_argv, _composing_task) do 39 | %Igniter.Mix.Task.Info{ 40 | group: :spark, 41 | schema: [extensions: :csv], 42 | required: [:extensions] 43 | } 44 | end 45 | 46 | @impl Igniter.Mix.Task 47 | def igniter(igniter) do 48 | igniter.args.options[:extensions] 49 | |> Enum.map(&Igniter.Project.Module.parse/1) 50 | |> Enum.uniq() 51 | |> Enum.reduce(igniter, fn extension, igniter -> 52 | cheat_sheet = Spark.CheatSheet.cheat_sheet(extension) 53 | 54 | extension_name = Spark.Mix.Helpers.extension_name(extension, []) 55 | 56 | filename = "documentation/dsls/DSL-#{extension_name}.md" 57 | 58 | Igniter.create_or_update_file(igniter, filename, cheat_sheet, fn source -> 59 | Rewrite.Source.update(source, :content, cheat_sheet) 60 | end) 61 | end) 62 | end 63 | end 64 | else 65 | defmodule Mix.Tasks.Spark.CheatSheets do 66 | @shortdoc "#{__MODULE__.Docs.short_doc()} | Install `igniter` to use" 67 | 68 | @moduledoc __MODULE__.Docs.long_doc() 69 | 70 | use Mix.Task 71 | 72 | def run(_argv) do 73 | Mix.shell().error(""" 74 | The task 'spark.cheat_sheets' requires igniter. Please install igniter and try again. 75 | 76 | For more information, see: https://hexdocs.pm/igniter/readme.html#installation 77 | """) 78 | 79 | exit({:shutdown, 1}) 80 | end 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /lib/mix/tasks/spark.cheat_sheets_in_search.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Jason) do 2 | defmodule Mix.Tasks.Spark.CheatSheetsInSearch do 3 | @shortdoc "Includes generated cheat sheets in the search bar" 4 | @moduledoc @shortdoc 5 | use Mix.Task 6 | 7 | def run(opts) do 8 | IO.warn(""" 9 | You should switch to using `Spark.Docs.search_data_for(dsl_module)` instead of this task. 
10 | 11 | i.e. 12 | 13 | {"documentation/dsls/DSL-Ash.Resource.md", 14 | search_data: Spark.Docs.search_data_for(Ash.Resource.Dsl)}, 15 | """) 16 | 17 | Mix.Task.run("compile") 18 | 19 | {opts, _} = 20 | OptionParser.parse!(opts, 21 | switches: [strip_prefix: :string, check: :boolean, extensions: :string] 22 | ) 23 | 24 | unless opts[:extensions] do 25 | raise "Must supply a comma separated list of extensions to include in the search data" 26 | end 27 | 28 | extensions = 29 | opts[:extensions] 30 | |> String.split(",") 31 | |> Enum.map(&Module.concat([&1])) 32 | |> Enum.uniq() 33 | 34 | with {:ok, search_data_file, search_data} <- search_data_file(), 35 | {:ok, sidebar_items_file, sidebar_items} <- sidebar_items_file() do 36 | {search_data, sidebar_items} = 37 | Enum.reduce(extensions, {search_data, sidebar_items}, fn extension, acc -> 38 | add_extension_to_search_data(extension, acc, opts) 39 | end) 40 | 41 | File.write!(search_data_file, "searchData=" <> Jason.encode!(search_data)) 42 | File.write!(sidebar_items_file, "sidebarNodes=" <> Jason.encode!(sidebar_items)) 43 | else 44 | {:error, error} -> raise error 45 | end 46 | end 47 | 48 | defp search_data_file do 49 | "doc/dist/search_data-*.js" 50 | |> Path.wildcard() 51 | |> Enum.at(0) 52 | |> case do 53 | nil -> 54 | {:error, "No search_data file found"} 55 | 56 | file -> 57 | case File.read!(file) do 58 | "searchData=" <> contents -> 59 | {:ok, file, Jason.decode!(contents)} 60 | 61 | _ -> 62 | {:error, "search data js file was malformed"} 63 | end 64 | end 65 | end 66 | 67 | defp sidebar_items_file do 68 | "doc/dist/sidebar_items-*.js" 69 | |> Path.wildcard() 70 | |> Enum.at(0) 71 | |> case do 72 | nil -> 73 | {:error, "No sidebar_items file found"} 74 | 75 | file -> 76 | case File.read!(file) do 77 | "sidebarNodes=" <> contents -> 78 | {:ok, file, Jason.decode!(contents)} 79 | 80 | _ -> 81 | {:error, "sidebar items js file was malformed"} 82 | end 83 | end 84 | end 85 | 86 | defp add_extension_to_search_data(extension, acc, opts) do 87 | extension_name = Spark.Mix.Helpers.extension_name(extension, opts) 88 | 89 | acc = 90 | Enum.reduce(extension.sections(), acc, fn section, acc -> 91 | add_section_to_search_data(extension_name, section, acc) 92 | end) 93 | 94 | Enum.reduce( 95 | extension.dsl_patches(), 96 | acc, 97 | fn %Spark.Dsl.Patch.AddEntity{ 98 | section_path: section_path, 99 | entity: entity 100 | }, 101 | acc -> 102 | add_entity_to_search_data( 103 | extension_name, 104 | entity, 105 | acc, 106 | section_path 107 | ) 108 | end 109 | ) 110 | end 111 | 112 | defp add_section_to_search_data( 113 | extension_name, 114 | section, 115 | {search_data, sidebar_items}, 116 | path \\ [] 117 | ) do 118 | search_data = 119 | add_search_item( 120 | search_data, 121 | %{ 122 | "doc" => section.describe, 123 | "ref" => 124 | "#{dsl_search_name(extension_name)}.html##{Enum.join(path ++ [section.name], "-")}", 125 | "title" => "#{extension_name}.#{Enum.join(path ++ [section.name], ".")}", 126 | "type" => "DSL" 127 | } 128 | ) 129 | 130 | search_data = 131 | add_schema_to_search_data( 132 | search_data, 133 | extension_name, 134 | section.schema, 135 | path ++ [section.name] 136 | ) 137 | 138 | # sidebar_items = 139 | # add_schema_to_sidebar_items( 140 | # sidebar_items, 141 | # extension_name, 142 | # to_string(Enum.at(path, 0) || section.name), 143 | # section.schema, 144 | # section.deprecations, 145 | # path ++ [section.name] 146 | # ) 147 | 148 | acc = 149 | Enum.reduce( 150 | section.sections, 151 | {search_data, sidebar_items},
152 | &add_section_to_search_data(extension_name, &1, &2, path ++ [section.name]) 153 | ) 154 | 155 | Enum.reduce( 156 | section.entities, 157 | acc, 158 | &add_entity_to_search_data(extension_name, &1, &2, path ++ [section.name]) 159 | ) 160 | end 161 | 162 | defp add_entity_to_search_data(extension_name, entity, {search_data, sidebar_items}, path) do 163 | path = path ++ [entity.name] 164 | dot_path = Enum.join(path, ".") 165 | dash_path = Enum.join(path, "-") 166 | 167 | search_data = 168 | add_search_item( 169 | search_data, 170 | %{ 171 | "doc" => entity.describe, 172 | "ref" => "#{dsl_search_name(extension_name)}.html##{dash_path}", 173 | "title" => "#{extension_name}.#{dot_path}", 174 | "type" => "DSL" 175 | } 176 | ) 177 | 178 | # tail_path = Enum.join(tl(path), ".") 179 | 180 | # sidebar_items = 181 | # add_sidebar_item( 182 | # sidebar_items, 183 | # extension_name, 184 | # to_string(Enum.at(path, 0)), 185 | # %{ 186 | # "anchor" => dash_path, 187 | # "deprecated" => false, 188 | # "label" => "DSL Entity", 189 | # "id" => "#{dot_path}/#{Enum.count(entity.args)}", 190 | # "header" => "#{tail_path}/#{Enum.count(entity.args)}", 191 | # "title" => "#{dot_path}/#{Enum.count(entity.args)}" 192 | # } 193 | # ) 194 | 195 | search_data = 196 | add_schema_to_search_data( 197 | search_data, 198 | extension_name, 199 | entity.schema, 200 | path 201 | ) 202 | 203 | # sidebar_items = 204 | # add_schema_to_sidebar_items( 205 | # sidebar_items, 206 | # extension_name, 207 | # to_string(Enum.at(path, 0)), 208 | # entity.schema, 209 | # entity.deprecations, 210 | # path 211 | # ) 212 | 213 | entity.entities 214 | |> Enum.flat_map(&List.wrap(elem(&1, 1))) 215 | |> Enum.reduce( 216 | {search_data, sidebar_items}, 217 | &add_entity_to_search_data(extension_name, &1, &2, path) 218 | ) 219 | end 220 | 221 | # defp add_schema_to_sidebar_items( 222 | # sidebar_items, 223 | # extension_name, 224 | # node_group_name, 225 | # schema, 226 | # deprecations, 227 | # path 228 | # ) do 229 | # Enum.reduce(schema || [], sidebar_items, fn {key, _config}, sidebar_items -> 230 | # path = path ++ [key] 231 | # dash_path = Enum.join(path, "-") 232 | # dot_path = Enum.join(path, ".") 233 | # 234 | # add_sidebar_item( 235 | # sidebar_items, 236 | # extension_name, 237 | # node_group_name, 238 | # %{ 239 | # "anchor" => dash_path, 240 | # "deprecated" => Keyword.has_key?(deprecations, key), 241 | # "label" => "DSL Option", 242 | # "id" => dot_path, 243 | # "hidden" => true, 244 | # "title" => dot_path 245 | # } 246 | # ) 247 | # end) 248 | # end 249 | 250 | # defp add_sidebar_item(sidebar_items, extension_name, node_group_name, item) do 251 | # sidebar_items 252 | # |> Map.put_new("extras", []) 253 | # |> Map.update!("extras", fn group -> 254 | # group_id = dsl_search_name(extension_name) 255 | # 256 | # group 257 | # |> Enum.map(fn group -> 258 | # if group["id"] == group_id do 259 | # group 260 | # |> Map.delete("headers") 261 | # |> Map.put("sections", []) 262 | # |> Map.put_new("nodeGroups", []) 263 | # |> Map.update!("nodeGroups", fn node_groups -> 264 | # node_groups 265 | # |> ensure_node_group(node_group_name) 266 | # |> Enum.map(fn node_group -> 267 | # if node_group["name"] == node_group_name do 268 | # node_group 269 | # |> Map.put_new("nodes", []) 270 | # |> Map.update!("nodes", fn nodes -> 271 | # nodes ++ [item] 272 | # end) 273 | # else 274 | # node_group 275 | # end 276 | # end) 277 | # end) 278 | # else 279 | # group 280 | # end 281 | # end) 282 | # end) 283 | # end 284 | 285 | # defp 
ensure_node_group(node_groups, node_group_name) do 286 | # if Enum.any?(node_groups, &(&1["name"] == node_group_name)) do 287 | # node_groups 288 | # else 289 | # node_groups ++ 290 | # [ 291 | # %{ 292 | # "key" => node_group_name, 293 | # "name" => node_group_name, 294 | # "nodes" => [] 295 | # } 296 | # ] 297 | # end 298 | # end 299 | 300 | defp add_schema_to_search_data( 301 | search_data, 302 | extension_name, 303 | schema, 304 | path 305 | ) do 306 | Enum.reduce(schema || [], search_data, fn {key, config}, search_data -> 307 | add_search_item( 308 | search_data, 309 | %{ 310 | "doc" => config[:doc] || "", 311 | "ref" => "#{dsl_search_name(extension_name)}.html##{Enum.join(path ++ [key], "-")}", 312 | "title" => "#{extension_name}.#{Enum.join(path ++ [key], ".")}", 313 | "type" => "DSL" 314 | } 315 | ) 316 | end) 317 | end 318 | 319 | defp dsl_search_name(extension_name) do 320 | ("dsl-" <> extension_name) |> String.split(".") |> Enum.map_join("-", &String.downcase/1) 321 | end 322 | 323 | defp add_search_item(search_data, item) do 324 | item = Map.update!(item, "title", &String.trim(&1 || "")) 325 | Map.update!(search_data, "items", &Enum.uniq([item | &1])) 326 | end 327 | end 328 | else 329 | defmodule Mix.Tasks.Spark.CheatSheetsInSearch do 330 | @shortdoc "Includes generated cheat sheets in the search bar" 331 | @moduledoc @shortdoc 332 | use Mix.Task 333 | 334 | def run(_opts) do 335 | raise "#{inspect(__MODULE__)} requires Jason. Please add it as a dev/test dependency." 336 | end 337 | end 338 | end 339 | -------------------------------------------------------------------------------- /lib/mix/tasks/spark.formatter.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Sourceror) do 2 | defmodule Mix.Tasks.Spark.Formatter do 3 | @shortdoc "Manages a variable called `spark_locals_without_parens` in the .formatter.exs from a list of DSL extensions." 4 | @moduledoc @shortdoc 5 | use Mix.Task 6 | 7 | @spec run(term) :: no_return 8 | def run(opts) do 9 | Mix.Task.run("compile") 10 | {opts, []} = OptionParser.parse!(opts, strict: [check: :boolean, extensions: :string]) 11 | 12 | unless opts[:extensions] do 13 | raise "Must supply a comma separated list of extensions to generate a .formatter.exs for" 14 | end 15 | 16 | extensions = 17 | opts[:extensions] 18 | |> String.split(",") 19 | |> Enum.map(&Module.concat([&1])) 20 | 21 | locals_without_parens = 22 | Enum.flat_map(extensions, fn extension_mod -> 23 | case Code.ensure_compiled(extension_mod) do 24 | {:module, _module} -> :ok 25 | other -> raise "Error ensuring extension compiled #{inspect(other)}" 26 | end 27 | 28 | all_entity_builders_everywhere( 29 | extension_mod.sections(), 30 | extension_mod.dsl_patches(), 31 | extensions 32 | ) 33 | end) 34 | |> Enum.uniq() 35 | |> Enum.sort() 36 | 37 | contents = File.read!(".formatter.exs") 38 | 39 | {_ast, spark_locals_without_parens} = 40 | contents 41 | |> Sourceror.parse_string!() 42 | |> Macro.prewalk( 43 | nil, 44 | fn 45 | {:=, _, 46 | [ 47 | {:spark_locals_without_parens, _, _}, 48 | right 49 | ]} = ast, 50 | _acc -> 51 | {ast, right} 52 | 53 | ast, acc -> 54 | {ast, acc} 55 | end 56 | ) 57 | 58 | if !spark_locals_without_parens do 59 | raise "Add `spark_locals_without_parens = []` to your .formatter.exs and run this again to populate the list." 
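# A minimal .formatter.exs sketch that satisfies this requirement (an
# illustrative assumption modeled on this repo's own .formatter.exs, not
# generated output; this task then rewrites the `= []` assignment):
#
#     spark_locals_without_parens = []
#
#     [
#       plugins: [Spark.Formatter],
#       inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
#       locals_without_parens: spark_locals_without_parens
#     ]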
60 | end 61 | 62 | new_contents = 63 | contents 64 | |> Sourceror.patch_string([ 65 | Sourceror.Patch.new( 66 | Sourceror.get_range(spark_locals_without_parens, include_comments: true), 67 | Sourceror.to_string(locals_without_parens, opts) 68 | ) 69 | ]) 70 | |> Code.format_string!() 71 | 72 | contents_with_newline = [new_contents, "\n"] 73 | 74 | if opts[:check] do 75 | if contents != IO.iodata_to_binary(contents_with_newline) do 76 | raise """ 77 | .formatter.exs is not up to date! 78 | 79 | Run the following command and commit the result: 80 | 81 | mix spark.formatter --extensions #{opts[:extensions]} 82 | """ 83 | else 84 | IO.puts("The current .formatter.exs is correct") 85 | end 86 | else 87 | File.write!(".formatter.exs", contents_with_newline) 88 | end 89 | end 90 | 91 | def all_entity_builders_everywhere(sections, dsl_patches, extensions, path \\ []) do 92 | patch_builders = 93 | dsl_patches 94 | |> Enum.filter(fn 95 | %Spark.Dsl.Patch.AddEntity{} -> 96 | true 97 | 98 | _ -> 99 | false 100 | end) 101 | |> Enum.map(& &1.entity) 102 | |> Enum.flat_map(fn entity -> 103 | Enum.concat([ 104 | Spark.Formatter.entity_option_builders(entity), 105 | Spark.Formatter.entity_builders(entity) 106 | ]) 107 | end) 108 | 109 | sections 110 | |> Enum.flat_map(fn section -> 111 | all_entity_builders_everywhere( 112 | section.sections, 113 | [], 114 | extensions, 115 | path ++ [section.name] 116 | ) 117 | end) 118 | |> Enum.concat(Spark.Formatter.all_entity_builders(sections, extensions, path)) 119 | |> Enum.concat(patch_builders) 120 | end 121 | end 122 | else 123 | defmodule Mix.Tasks.Spark.Formatter do 124 | @shortdoc "Manages a variable called `spark_locals_without_parens` in the .formatter.exs from a list of DSL extensions." 125 | @moduledoc @shortdoc 126 | use Mix.Task 127 | 128 | def run(_opts) do 129 | raise "This task requires sourceror to run. Please add it as a dev/test dependency" 130 | end 131 | end 132 | end 133 | -------------------------------------------------------------------------------- /lib/mix/tasks/spark.install.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Igniter) do 2 | defmodule Mix.Tasks.Spark.Install do 3 | @moduledoc "Installs spark by adding the `Spark.Formatter` plugin, and providing a basic configuration for it in `config.exs`." 4 | @shortdoc @moduledoc 5 | 6 | use Igniter.Mix.Task 7 | 8 | @impl true 9 | def info(_argv, _parent) do 10 | %Igniter.Mix.Task.Info{ 11 | schema: [ 12 | yes: :boolean 13 | ], 14 | aliases: [ 15 | y: :yes 16 | ] 17 | } 18 | end 19 | 20 | @impl true 21 | def igniter(igniter) do 22 | igniter 23 | |> Igniter.Project.Formatter.add_formatter_plugin(Spark.Formatter) 24 | |> then(fn igniter -> 25 | if Igniter.Project.Deps.has_dep?(igniter, :sourceror) do 26 | igniter 27 | else 28 | Igniter.Project.Deps.add_dep(igniter, {:sourceror, "~> 1.8", only: [:dev, :test]}, 29 | yes?: igniter.args.options[:yes], 30 | notify_on_present?: false 31 | ) 32 | end 33 | end) 34 | |> Igniter.Project.Config.configure( 35 | "config.exs", 36 | :spark, 37 | [:formatter, :remove_parens?], 38 | true, 39 | updater: &{:ok, &1} 40 | ) 41 | end 42 | end 43 | else 44 | defmodule Mix.Tasks.Spark.Install do 45 | @moduledoc "Installs spark by adding the `Spark.Formatter` plugin, and providing a basic configuration for it in `config.exs`." 46 | @shortdoc @moduledoc 47 | 48 | use Mix.Task 49 | 50 | def run(_argv) do 51 | Mix.shell().error(""" 52 | The task 'spark.install' requires igniter to be run. 
53 | 54 | Please install igniter and try again. 55 | 56 | For more information, see: https://hexdocs.pm/igniter 57 | """) 58 | 59 | exit({:shutdown, 1}) 60 | end 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /lib/mix/tasks/spark.replace_doc_links.ex: -------------------------------------------------------------------------------- 1 | defmodule Mix.Tasks.Spark.ReplaceDocLinks do 2 | @moduledoc """ 3 | Replaces any documentation links with text appropriate for hex docs. 4 | 5 | This makes projects support 6 | """ 7 | use Mix.Task 8 | 9 | @auto_prefixes ~w( 10 | ash ash_postgres ash_graphql ash_phoenix ash_authentication ash_archival 11 | ash_json_api ash_admin ash_state_machine ash_oban ash_geo ash_appsignal ash_rbac ash_query_builder 12 | ash_uuid ash_csv pyro smokestack ash_thrift ash_double_entry ash_ulid 13 | ) 14 | 15 | @prefixes @auto_prefixes 16 | |> Map.new(&{Macro.camelize(&1), &1}) 17 | |> Map.merge(%{ 18 | "AshAuthentication.Phoenix" => "ash_authentication_phoenix" 19 | }) 20 | 21 | @shortdoc "Replaces any spark dsl specific doc links with text appropriate for hex docs." 22 | def run(_argv) do 23 | mix_project = Mix.Project.get!() 24 | module_prefix = mix_project |> Module.split() |> Enum.at(0) 25 | 26 | "doc/**/*.html" 27 | |> Path.wildcard() 28 | |> Enum.each(fn file -> 29 | new_contents = 30 | file 31 | |> File.read!() 32 | |> String.replace(~r/\>d\:[a-zA-Z0-9|_\?\!\.]*\</, fn ">d:" <> contents -> 33 | contents = 34 | contents 35 | |> String.trim_trailing("<") 36 | |> String.replace("|", ".") 37 | 38 | module_name = 39 | contents 40 | |> String.split(".") 41 | |> Enum.take_while(&capitalized?/1) 42 | |> Enum.join(".") 43 | 44 | url_prefix = 45 | if String.starts_with?(module_name, module_prefix <> ".") do 46 | case Code.ensure_compiled(Module.concat([module_name])) do 47 | {:module, _} -> 48 | {:ok, ""} 49 | 50 | {:error, error} -> 51 | raise "Expected #{module_name} to be compiled because the link \"d:#{contents}\" was used, but it was not available: #{inspect(error)}" 52 | end 53 | else 54 | @prefixes 55 | |> Enum.filter(fn {key, _value} -> String.starts_with?(module_name, key) end) 56 | |> Enum.sort_by(fn {key, _} -> String.length(key) end) 57 | |> Enum.reverse() 58 | |> Enum.at(0) 59 | |> case do 60 | nil -> 61 | :error 62 | 63 | {_, package_name} -> 64 | {:ok, "https://hexdocs.pm/#{package_name}/"} 65 | end 66 | end 67 | 68 | case url_prefix do 69 | {:ok, prefix} -> 70 | name = 71 | module_name 72 | |> String.trim_trailing(".Dsl") 73 | |> String.split(".") 74 | |> Enum.map_join("-", &String.downcase/1) 75 | 76 | rest = 77 | contents |> String.trim_leading(module_name <> ".") |> String.replace(".", "-") 78 | 79 | "><a href=\"#{prefix}dsl-#{name}.html##{rest}\">#{contents}</a><" 80 | 81 | :error -> 82 | ">#{contents}<" 83 | end 84 | end) 85 | 86 | File.write!(file, new_contents) 87 | end) 88 | end 89 | 90 | defp capitalized?(string) do 91 | first = 92 | string 93 | |> String.graphemes() 94 | |> Enum.at(0) 95 | 96 | String.downcase(first) != first 97 | end 98 | end 99 | -------------------------------------------------------------------------------- /lib/spark.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark do 2 | @moduledoc """ 3 | Documentation for `Spark`. 4 | """ 5 | 6 | @doc """ 7 | Returns all modules that implement the specified behaviour for a given otp_app. 8 | 9 | Should only be called at runtime, not at compile time, as it will have 10 | inconsistent results at compile time.
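
A usage sketch; the app and DSL module names here are hypothetical:

```elixir
# at runtime, e.g. in your application's start/2
Spark.sparks(:my_app, MyLibrary.Dsl)
#=> [MyApp.ModuleUsingTheDsl, ...]
```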
11 | """ 12 | def sparks(otp_app, spark) do 13 | otp_app 14 | |> :application.get_key(:modules) 15 | |> case do 16 | {:ok, mods} when is_list(mods) -> 17 | mods 18 | 19 | _ -> 20 | [] 21 | end 22 | |> Enum.filter(&Spark.Dsl.is?(&1, spark)) 23 | end 24 | 25 | @doc "Returns true if the module implements the specified behavior" 26 | def implements_behaviour?(module, behaviour) do 27 | :attributes 28 | |> module.module_info() 29 | |> Enum.any?(fn 30 | {:behaviour, ^behaviour} -> 31 | true 32 | 33 | # optimizations, probably extremely minor but this is in a tight loop in some places 34 | {:behaviour, [^behaviour | _]} -> 35 | true 36 | 37 | {:behaviour, [_, ^behaviour | _]} -> 38 | true 39 | 40 | {:behaviour, [_, _, ^behaviour | _]} -> 41 | true 42 | 43 | # never seen a module with three behaviours in real life, let alone four. 44 | {:behaviour, behaviours} when is_list(behaviours) -> 45 | module in behaviours 46 | 47 | _ -> 48 | false 49 | end) 50 | rescue 51 | _ -> 52 | false 53 | end 54 | 55 | @doc "Returns the extensions a given DSL uses" 56 | def extensions(module) do 57 | Spark.Dsl.Extension.get_persisted(module, :extensions, []) 58 | end 59 | 60 | @doc "Returns the configured otp_app of a given DSL instance" 61 | def otp_app(module) do 62 | Spark.Dsl.Extension.get_persisted(module, :otp_app) 63 | end 64 | end 65 | -------------------------------------------------------------------------------- /lib/spark/docs.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Docs do 2 | @moduledoc """ 3 | Tools for generating docs & search data for extras. 4 | """ 5 | 6 | @doc """ 7 | Generates searchable documentation suitable for ex_doc 8 | """ 9 | def search_data_for(dsl) do 10 | dsl.sections() 11 | |> Enum.flat_map(fn section -> 12 | section_search_data(section) 13 | end) 14 | |> Enum.concat( 15 | Enum.flat_map(dsl.dsl_patches(), fn %Spark.Dsl.Patch.AddEntity{ 16 | section_path: section_path, 17 | entity: entity 18 | } -> 19 | entity_search_data(entity, section_path) 20 | end) 21 | ) 22 | end 23 | 24 | defp section_search_data(section, path \\ []) do 25 | schema_path = 26 | if section.top_level? do 27 | path 28 | else 29 | path ++ [section.name] 30 | end 31 | 32 | Enum.concat([ 33 | [ 34 | %{ 35 | anchor: anchor(schema_path), 36 | body: section.describe, 37 | title: title(schema_path), 38 | type: "DSL" 39 | } 40 | ], 41 | schema_search_data(section.schema, schema_path), 42 | Enum.flat_map(section.entities, &entity_search_data(&1, schema_path)), 43 | Enum.flat_map(section.sections, §ion_search_data(&1, schema_path)) 44 | ]) 45 | |> Enum.map(fn search_item -> 46 | if section.top_level? 
do 47 | %{search_item | anchor: "#{section.name}-#{search_item.anchor}"} 48 | else 49 | search_item 50 | end 51 | end) 52 | end 53 | 54 | defp entity_search_data(entity, path) do 55 | Enum.concat([ 56 | [ 57 | %{ 58 | anchor: anchor(path ++ [entity.name]), 59 | body: entity.describe, 60 | title: title(path ++ [entity.name]), 61 | type: "DSL" 62 | } 63 | ], 64 | schema_search_data(entity.schema, path ++ [entity.name]), 65 | Enum.flat_map(entity.entities, fn {_key, entities} -> 66 | Enum.flat_map(entities, fn nested_entity -> 67 | entity_search_data(nested_entity, path ++ [entity.name]) 68 | end) 69 | end) 70 | ]) 71 | end 72 | 73 | defp schema_search_data(schema, path) do 74 | Enum.flat_map(schema, fn {key, config} -> 75 | if config[:hide] do 76 | [] 77 | else 78 | [ 79 | %{ 80 | anchor: anchor(path ++ [key]), 81 | body: config[:doc] || "", 82 | title: title(path ++ [key]), 83 | type: "DSL" 84 | } 85 | ] 86 | end 87 | end) 88 | end 89 | 90 | defp anchor(list), do: Enum.join(list, "-") 91 | defp title(list), do: Enum.join(list, ".") 92 | end 93 | -------------------------------------------------------------------------------- /lib/spark/dsl/builder.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Dsl.Builder do 2 | @moduledoc """ 3 | Utilities for building DSL objects programmatically, generally used in transformers. 4 | """ 5 | 6 | defmacro __using__(_) do 7 | quote do 8 | import Spark.Dsl.Builder 9 | end 10 | end 11 | 12 | @type result :: {:ok, Spark.Dsl.t()} | {:error, term()} 13 | @type input :: {:ok, Spark.Dsl.t()} | {:error, term()} | Spark.Dsl.t() 14 | 15 | defmacro defbuilder({func, _, [dsl_state | rest_args]}, do: body) do 16 | def_head? = Enum.any?(rest_args, &match?({:\\, _, _}, &1)) 17 | rest_args_with_defaults = rest_args 18 | 19 | rest_args = 20 | Enum.map(rest_args, fn 21 | {:\\, _, [expr, _default]} -> 22 | expr 23 | 24 | other -> 25 | other 26 | end) 27 | 28 | quote generated: true, 29 | location: :keep, 30 | bind_quoted: [ 31 | def_head?: def_head?, 32 | rest_args: Macro.escape(rest_args), 33 | rest_args_with_defaults: Macro.escape(rest_args_with_defaults), 34 | dsl_state: Macro.escape(dsl_state), 35 | func: Macro.escape(func), 36 | body: Macro.escape(body) 37 | ] do 38 | if def_head? do 39 | def unquote(func)(unquote(dsl_state), unquote_splicing(rest_args_with_defaults)) 40 | end 41 | 42 | def unquote(func)({:ok, unquote(dsl_state)}, unquote_splicing(rest_args)) do 43 | case unquote(body) do 44 | {:ok, result} -> 45 | {:ok, result} 46 | 47 | {:error, error} -> 48 | {:error, error} 49 | 50 | body -> 51 | {:ok, body} 52 | end 53 | end 54 | 55 | def unquote(func)( 56 | {:error, error}, 57 | unquote_splicing( 58 | Enum.map(rest_args, fn _ -> 59 | {:_, [], Elixir} 60 | end) 61 | ) 62 | ) do 63 | {:error, error} 64 | end 65 | 66 | def unquote(func)(unquote(dsl_state), unquote_splicing(rest_args)) do 67 | case unquote(body) do 68 | {:ok, result} -> 69 | {:ok, result} 70 | 71 | {:error, error} -> 72 | {:error, error} 73 | 74 | body -> 75 | {:ok, body} 76 | end 77 | end 78 | end 79 | end 80 | 81 | defmacro defbuilderp({func, _, [dsl_state | rest_args]}, do: body) do 82 | def_head?
= Enum.any?(rest_args, &match?({:\\, _, _}, &1)) 83 | rest_args_with_defaults = rest_args 84 | 85 | rest_args = 86 | Enum.map(rest_args, fn 87 | {:\\, _, [expr, _default]} -> 88 | expr 89 | 90 | other -> 91 | other 92 | end) 93 | 94 | quote generated: true, 95 | location: :keep, 96 | bind_quoted: [ 97 | def_head?: def_head?, 98 | rest_args: Macro.escape(rest_args), 99 | rest_args_with_defaults: Macro.escape(rest_args_with_defaults), 100 | dsl_state: Macro.escape(dsl_state), 101 | func: Macro.escape(func), 102 | body: Macro.escape(body) 103 | ] do 104 | if def_head? do 105 | defp unquote(func)(unquote(dsl_state), unquote_splicing(rest_args_with_defaults)) 106 | end 107 | 108 | defp unquote(func)({:ok, unquote(dsl_state)}, unquote_splicing(rest_args)) do 109 | case unquote(body) do 110 | {:ok, result} -> 111 | {:ok, result} 112 | 113 | {:error, error} -> 114 | {:error, error} 115 | 116 | body -> 117 | {:ok, body} 118 | end 119 | end 120 | 121 | defp unquote(func)( 122 | {:error, error}, 123 | unquote_splicing( 124 | Enum.map(rest_args, fn _ -> 125 | {:_, [], Elixir} 126 | end) 127 | ) 128 | ) do 129 | {:error, error} 130 | end 131 | 132 | defp unquote(func)(unquote(dsl_state), unquote_splicing(rest_args)) do 133 | case unquote(body) do 134 | {:ok, result} -> 135 | {:ok, result} 136 | 137 | {:error, error} -> 138 | {:error, error} 139 | 140 | body -> 141 | {:ok, body} 142 | end 143 | end 144 | end 145 | end 146 | 147 | @doc """ 148 | Handles nested values that may be `{:ok, result}` or `{:error, term}`, returning any errors and unwrapping any ok values. 149 | 150 | This allows users of builders to do things like: 151 | 152 | ```elixir 153 | dsl_state 154 | |> Ash.Resource.Builder.add_new_action(:update, :publish, 155 | changes: [ 156 | Ash.Resource.Builder.build_action_change( 157 | Ash.Resource.Change.Builtins.set_attribute(:state, :published) 158 | ) 159 | ] 160 | ) 161 | ``` 162 | 163 | This works if your builder function calls `handle_nested_builders/2` with its input before building the thing it's building. 164 | """ 165 | def handle_nested_builders(opts, nested) do 166 | Enum.reduce_while(nested, {:ok, opts}, fn nested, {:ok, opts} -> 167 | case Keyword.get(opts, nested) do 168 | nil -> 169 | {:cont, {:ok, opts}} 170 | 171 | values when is_list(values) -> 172 | Enum.reduce_while(values, {:ok, []}, fn 173 | {:ok, value}, {:ok, values} -> 174 | {:cont, {:ok, [value | values]}} 175 | 176 | {:error, error}, _ -> 177 | {:halt, {:error, error}} 178 | 179 | value, {:ok, values} -> 180 | {:cont, {:ok, [value | values]}} 181 | end) 182 | |> case do 183 | {:ok, values} -> {:cont, {:ok, Keyword.put(opts, nested, Enum.reverse(values))}} 184 | other -> other 185 | end 186 | 187 | {:ok, value} -> 188 | {:cont, {:ok, Keyword.put(opts, nested, value)}} 189 | 190 | {:error, error} -> 191 | {:halt, {:error, error}} 192 | 193 | _value -> 194 | {:cont, {:ok, opts}} 195 | end 196 | end) 197 | end 198 | end 199 | -------------------------------------------------------------------------------- /lib/spark/dsl/entity.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Dsl.Entity do 2 | @moduledoc """ 3 | Declares a DSL entity. 4 | 5 | A dsl entity represents a dsl constructor whose resulting value is a struct. 6 | This lets the user create complex objects with arbitrary (mostly) validation rules. 7 | 8 | The lifecycle of creating entities is complex, happening as Elixir is compiling 9 | the modules in question.
Some of the patterns around validating/transforming entities 10 | have not yet solidified. If you aren't careful and don't follow the guidelines listed 11 | here, you can have subtle and strange bugs during compilation. Anything not isolated to 12 | simple value validations should be done in `transformers`. See `Spark.Dsl.Transformer`. 13 | 14 | An entity has a `target` indicating which struct will ultimately be built. An entity 15 | also has a `schema`. This schema is used for documentation, and the options are validated 16 | against it before continuing on with the DSL. 17 | 18 | To create positional arguments to the builder, use `args`. The values provided to 19 | `args` need to be in the provided schema as well. They will be positional arguments 20 | in the same order that they are provided in the `args` key. 21 | 22 | `auto_set_fields` will set the provided values into the produced struct (they do not need 23 | to be included in the schema). 24 | 25 | `transform` is a function that takes a created struct and can alter it. This happens immediately 26 | after handling the DSL options, and can be useful for setting field values on a struct based on 27 | other values in that struct. If you need things that aren't contained in that struct, use an 28 | `Spark.Dsl.Transformer`. This function returns `{:ok, new_entity}` or `{:error, error}`, so this can 29 | also be used to validate the entity. 30 | 31 | `entities` allows you to specify a keyword list of nested entities. Nested entities are stored 32 | on the struct in the corresponding key, and are used in the same way entities are otherwise. 33 | 34 | `singleton_entity_keys` specifies a set of entity keys (specified above) that should only have a 35 | single value. This will be validated and unwrapped into `nil` | `single_value` on success. 36 | 37 | `identifier` expresses that a given entity is unique by that field, validated by the DSL. 38 | 39 | ## Example 40 | 41 | ```elixir 42 | @my_entity %Spark.Dsl.Entity{ 43 | name: :my_entity, 44 | target: MyStruct, 45 | schema: [my_field: [type: :atom, required: false]] 46 | } 47 | ``` 48 | 49 | Once compiled by Spark, entities can be invoked with a keyword list: 50 | 51 | ```elixir 52 | my_entity my_field: :value 53 | ``` 54 | 55 | Or with a do block: 56 | 57 | ```elixir 58 | my_entity do 59 | my_field :value 60 | end 61 | ``` 62 | 63 | For a full example, see `Spark.Dsl.Extension`. 64 | """ 65 | 66 | defstruct [ 67 | :name, 68 | :target, 69 | :transform, 70 | :recursive_as, 71 | examples: [], 72 | entities: [], 73 | singleton_entity_keys: [], 74 | deprecations: [], 75 | describe: "", 76 | snippet: "", 77 | args: [], 78 | links: nil, 79 | hide: [], 80 | identifier: nil, 81 | modules: [], 82 | imports: [], 83 | no_depend_modules: [], 84 | schema: [], 85 | auto_set_fields: [], 86 | docs: "" 87 | ] 88 | 89 | alias Spark.{Dsl.Entity} 90 | 91 | @typedoc """ 92 | Defines the struct that will be built from this entity definition. 93 | 94 | The struct will need to have fields for all [`entities`](#t:entities/0), `t:schema/0` fields, and `t:auto_set_fields/0`. 95 | """ 96 | @type target :: module() | nil 97 | 98 | @typedoc """ 99 | A keyword list of nested entities. 100 | """ 101 | @type entities :: keyword(t) 102 | 103 | @typedoc """ 104 | Specifies a function that will run on the target struct after building. 
105 |
106 |   ```elixir
107 |   @my_entity %Spark.Dsl.Entity{
108 |     name: :my_entity,
109 |     target: MyEntity,
110 |     schema: [
111 |       my_field: [type: :list, required: true]
112 |     ],
113 |     transform: {MyModule, :max_three_items, []}
114 |   }
115 |
116 |   def max_three_items(my_entity) do
117 |     if length(my_entity.my_field) > 3 do
118 |       {:error, "Can't have more than three items"}
119 |     else
120 |       {:ok, my_entity}
121 |     end
122 |   end
123 |   ```
124 |   """
125 |   @type transform :: {module(), function :: atom(), args :: [any()]} | nil
126 |
127 |   @typedoc """
128 |   Specifies positional arguments for an Entity.
129 |
130 |   An entity declared like this:
131 |
132 |   ```elixir
133 |   @entity %Spark.Dsl.Entity{
134 |     name: :entity,
135 |     target: Entity,
136 |     schema: [
137 |       positional: [type: :atom, required: true],
138 |       other: [type: :atom, required: false]
139 |     ],
140 |     args: [:positional]
141 |   }
142 |   ```
143 |
144 |   Can be instantiated like this:
145 |
146 |   ```elixir
147 |   entity :positional_argument do
148 |     other :other_argument
149 |   end
150 |   ```
151 |   """
152 |   @type args :: [atom | {:optional, atom} | {:optional, atom, any}]
153 |
154 |   @typedoc """
155 |   Sets the provided key/value pairs in the produced struct. These fields do not need to be included in the Entity's schema.
156 |   """
157 |   @type auto_set_fields :: keyword(any)
158 |
159 |   @type deprecations :: keyword(String.t())
160 |
161 |   # Using type id() since identifier is a reserved type.
162 |   @type id :: term()
163 |
164 |   @type imports :: [module()]
165 |
166 |   @type name :: atom | nil
167 |
168 |   @typedoc """
169 |   Internal field. Not set by user.
170 |   """
171 |   @type docs :: String.t()
172 |   @typedoc """
173 |   User-provided documentation.
174 |
175 |   Documentation provided in an `Entity`'s `describe` field will be included by `Spark` in any generated documentation that includes the `Entity`.
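
  For example, a hypothetical entity might be documented like this:

  ```elixir
  @my_entity %Spark.Dsl.Entity{
    name: :my_entity,
    target: MyEntity,
    describe: "Configures a single my_entity."
  }
  ```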
176 | """ 177 | @type describe :: String.t() 178 | 179 | @type examples :: [String.t()] 180 | 181 | @type hide :: [atom()] 182 | 183 | @type links :: keyword([String.t()]) | nil 184 | 185 | @type modules :: [atom] 186 | 187 | @type no_depend_modules :: [atom] 188 | 189 | @type recursive_as :: atom | nil 190 | 191 | @type singleton_entity_keys :: [atom] 192 | 193 | @type snippet :: String.t() 194 | 195 | @type t :: %Entity{ 196 | args: args(), 197 | auto_set_fields: auto_set_fields(), 198 | deprecations: deprecations(), 199 | describe: describe(), 200 | docs: docs(), 201 | entities: entities(), 202 | examples: examples(), 203 | hide: hide(), 204 | identifier: id(), 205 | imports: imports(), 206 | links: links(), 207 | modules: modules(), 208 | name: name(), 209 | no_depend_modules: no_depend_modules(), 210 | recursive_as: recursive_as(), 211 | schema: Spark.Options.schema(), 212 | singleton_entity_keys: singleton_entity_keys(), 213 | snippet: snippet(), 214 | target: target(), 215 | transform: transform() 216 | } 217 | 218 | @doc false 219 | def arg_names(entity) do 220 | entity.args 221 | |> Kernel.||([]) 222 | |> Enum.map(fn 223 | tuple when is_tuple(tuple) -> 224 | elem(tuple, 1) 225 | 226 | other -> 227 | other 228 | end) 229 | end 230 | 231 | @doc false 232 | def required_arg_names(entity) do 233 | entity.args 234 | |> Kernel.||([]) 235 | |> Enum.filter(&is_atom/1) 236 | end 237 | 238 | @doc false 239 | def build( 240 | %{ 241 | target: target, 242 | schema: schema, 243 | auto_set_fields: auto_set_fields, 244 | transform: transform, 245 | identifier: identifier, 246 | singleton_entity_keys: singleton_entity_keys, 247 | entities: nested_entity_definitions 248 | }, 249 | opts, 250 | nested_entities 251 | ) do 252 | with {:ok, opts, more_nested_entities} <- 253 | fetch_single_argument_entities_from_opts(opts, nested_entity_definitions), 254 | opts <- Keyword.new(opts), 255 | {before_validate_auto, after_validate_auto} = 256 | Keyword.split(auto_set_fields || [], Keyword.keys(schema)), 257 | {:ok, opts} <- Spark.Options.validate(Keyword.merge(opts, before_validate_auto), schema), 258 | opts <- Keyword.merge(opts, after_validate_auto), 259 | opts <- Enum.map(opts, fn {key, value} -> {schema[key][:as] || key, value} end), 260 | built <- struct(target, opts), 261 | built <- 262 | struct( 263 | built, 264 | Keyword.merge( 265 | Keyword.new(nested_entities), 266 | Keyword.new(more_nested_entities), 267 | fn _k, v1, v2 -> 268 | v1 ++ v2 269 | end 270 | ) 271 | ), 272 | {:ok, built} <- validate_singleton_entity_keys(built, singleton_entity_keys), 273 | {:ok, built} <- transform(transform, built) do 274 | maybe_apply_identifier(built, identifier) 275 | end 276 | end 277 | 278 | defp fetch_single_argument_entities_from_opts(opts, nested_entity_definitions) do 279 | Enum.reduce_while(nested_entity_definitions, {:ok, opts, []}, fn 280 | {key, entity_definitions}, {:ok, opts, more_nested_entities} -> 281 | entity_definitions 282 | |> Enum.filter(fn entity_definition -> Enum.count(entity_definition.args) == 1 end) 283 | |> Enum.reduce_while( 284 | {:ok, opts, more_nested_entities}, 285 | fn entity_definition, {:ok, opts, more_nested_entities} -> 286 | values = Keyword.get_values(opts, entity_definition.name) 287 | opts = Keyword.delete(opts, entity_definition.name) 288 | 289 | Enum.reduce_while(values, {:ok, opts, more_nested_entities}, fn single_arg, 290 | {:ok, opts, 291 | more_nested_entities} -> 292 | case build( 293 | entity_definition, 294 | [{Enum.at(entity_definition.args, 0), single_arg}], 295 
| [] 296 | ) do 297 | {:ok, built} -> 298 | {:cont, 299 | {:ok, opts, 300 | Keyword.update( 301 | more_nested_entities, 302 | key, 303 | [built], 304 | &[built | &1] 305 | )}} 306 | 307 | {:error, error} -> 308 | {:halt, {:error, error}} 309 | end 310 | end) 311 | |> case do 312 | {:ok, opts, more_nested_entities} -> 313 | {:cont, {:ok, opts, more_nested_entities}} 314 | 315 | {:error, error} -> 316 | {:halt, {:error, error}} 317 | end 318 | end 319 | ) 320 | |> case do 321 | {:ok, opts, more_nested_entities} -> 322 | {:cont, {:ok, opts, more_nested_entities}} 323 | 324 | {:error, error} -> 325 | {:halt, {:error, error}} 326 | end 327 | end) 328 | end 329 | 330 | defp validate_singleton_entity_keys(entity, []), do: {:ok, entity} 331 | 332 | defp validate_singleton_entity_keys(entity, [key | rest]) do 333 | case Map.get(entity, key) do 334 | nil -> 335 | validate_singleton_entity_keys(entity, rest) 336 | 337 | [] -> 338 | validate_singleton_entity_keys(Map.put(entity, key, nil), rest) 339 | 340 | [nested_entity] -> 341 | validate_singleton_entity_keys(Map.put(entity, key, nested_entity), rest) 342 | 343 | entities -> 344 | {:error, "Expected a single #{key}, got #{Enum.count(entities)}"} 345 | end 346 | end 347 | 348 | @doc false 349 | def maybe_apply_identifier(struct, nil), do: {:ok, struct} 350 | 351 | def maybe_apply_identifier(struct, {:auto, :unique_integer}) 352 | when is_map_key(struct, :__identifier__), 353 | do: {:ok, %{struct | __identifier__: System.unique_integer()}} 354 | 355 | def maybe_apply_identifier(struct, {:auto, :unique_integer}), 356 | do: raise("#{inspect(struct.__struct__)} must have the `__identifier__` field!") 357 | 358 | def maybe_apply_identifier(struct, name) 359 | when is_map_key(struct, :__identifier__) and is_map_key(struct, name), 360 | do: {:ok, %{struct | __identifier__: Map.get(struct, name)}} 361 | 362 | def maybe_apply_identifier(struct, name) when is_map_key(struct, :__identifier__), 363 | do: raise("#{inspect(struct.__struct__)} does not have a field named `#{inspect(name)}`!") 364 | 365 | def maybe_apply_identifier(struct, _name), 366 | do: raise("#{inspect(struct.__struct__)} must have the `__identifier__` field!") 367 | 368 | @doc false 369 | def transform(nil, built), do: {:ok, built} 370 | 371 | def transform({module, function, args}, built) do 372 | apply(module, function, [built | args]) 373 | end 374 | end 375 | -------------------------------------------------------------------------------- /lib/spark/dsl/extension/entity.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Dsl.Extension.Entity do 2 | @moduledoc false 3 | 4 | def setup(module, recursive_as, nested_key, opts, arg_values) do 5 | parent_recursive_as = Process.get(:parent_recursive_as) 6 | original_nested_entity_path = Process.get(:recursive_builder_path) 7 | 8 | nested_entity_path = 9 | if is_nil(original_nested_entity_path) do 10 | Process.put(:recursive_builder_path, []) 11 | [] 12 | else 13 | unless recursive_as || nested_key || parent_recursive_as do 14 | raise "Somehow got a nested entity without a `recursive_as` or `nested_key`" 15 | end 16 | 17 | path = 18 | (original_nested_entity_path || []) ++ 19 | [recursive_as || nested_key || parent_recursive_as] 20 | 21 | Process.put( 22 | :recursive_builder_path, 23 | path 24 | ) 25 | 26 | path 27 | end 28 | 29 | if recursive_as do 30 | Process.put(:parent_recursive_as, recursive_as) 31 | end 32 | 33 | current_sections = Process.get({module, :spark_sections}, []) 34 | 35 | 
keyword_opts = 36 | Keyword.merge( 37 | opts, 38 | arg_values, 39 | fn key, _, _ -> 40 | raise Spark.Error.DslError, 41 | module: module, 42 | message: "Multiple values for key `#{inspect(key)}`", 43 | path: nested_entity_path 44 | end 45 | ) 46 | 47 | Process.put( 48 | {:builder_opts, nested_entity_path}, 49 | keyword_opts 50 | ) 51 | 52 | {original_nested_entity_path, parent_recursive_as, nested_entity_path, current_sections} 53 | end 54 | 55 | def handle( 56 | module, 57 | section_path, 58 | nested_entity_keys, 59 | entity_builder, 60 | extension, 61 | {original_nested_entity_path, parent_recursive_as, nested_entity_path, current_sections} 62 | ) do 63 | Process.put(:recursive_builder_path, original_nested_entity_path) 64 | Process.put(:parent_recursive_as, parent_recursive_as) 65 | 66 | current_config = 67 | Process.get( 68 | {module, :spark, section_path ++ nested_entity_path}, 69 | %{entities: [], opts: []} 70 | ) 71 | 72 | opts = Process.delete({:builder_opts, nested_entity_path}) 73 | 74 | nested_entities = 75 | nested_entity_keys 76 | |> Enum.reduce(%{}, fn key, acc -> 77 | nested_path = section_path ++ nested_entity_path ++ [key] 78 | 79 | entities = 80 | {module, :spark, nested_path} 81 | |> Process.get(%{entities: []}) 82 | |> Map.get(:entities, []) 83 | 84 | Process.delete({module, :spark, nested_path}) 85 | 86 | Map.update(acc, key, entities, fn current_nested_entities -> 87 | (current_nested_entities || []) ++ entities 88 | end) 89 | end) 90 | 91 | built = entity_builder.__build__(module, opts, nested_entities) 92 | 93 | new_config = %{current_config | entities: current_config.entities ++ [built]} 94 | 95 | unless {extension, section_path} in current_sections do 96 | Process.put({module, :spark_sections}, [ 97 | {extension, section_path} | current_sections 98 | ]) 99 | end 100 | 101 | Process.put( 102 | {module, :spark, section_path ++ nested_entity_path}, 103 | new_config 104 | ) 105 | end 106 | end 107 | -------------------------------------------------------------------------------- /lib/spark/dsl/extension/entity_option.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Dsl.Extension.EntityOption do 2 | @moduledoc false 3 | 4 | def value_and_function(value, field, type, caller, modules, no_depend_modules) do 5 | value = 6 | case type do 7 | :quoted -> 8 | Macro.escape(value) 9 | 10 | _ -> 11 | value 12 | end 13 | 14 | value = 15 | cond do 16 | field in modules -> 17 | Spark.Dsl.Extension.expand_alias(value, caller) 18 | 19 | field in no_depend_modules -> 20 | Spark.Dsl.Extension.expand_alias_no_require(value, caller) 21 | 22 | true -> 23 | value 24 | end 25 | 26 | Spark.CodeHelpers.lift_functions(value, field, caller) 27 | end 28 | 29 | def set_entity_option(module, key, value) do 30 | nested_entity_path = Process.get(:recursive_builder_path) 31 | current_opts = Process.get({:builder_opts, nested_entity_path}, []) 32 | 33 | if Keyword.has_key?(current_opts, key) do 34 | raise Spark.Error.DslError, 35 | module: module, 36 | message: "Multiple values for key `#{inspect(key)}`", 37 | path: nested_entity_path 38 | end 39 | 40 | Process.put( 41 | {:builder_opts, nested_entity_path}, 42 | Keyword.put(current_opts, key, value) 43 | ) 44 | end 45 | end 46 | -------------------------------------------------------------------------------- /lib/spark/dsl/extension/imports.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Dsl.Extension.Imports do 2 | @moduledoc false 3 | def 
import_solving_conflicts(mods, caller) do
4 |     mods
5 |     |> Enum.flat_map(fn mod ->
6 |       [do_import(mod) | resolve_conflicts(mod, caller)]
7 |     end)
8 |   end
9 |
10 |   defp resolve_conflicts(mod, caller) do
11 |     imported_by_mod = mod.__info__(:functions) ++ mod.__info__(:macros)
12 |
13 |     unimports_for_conflicts(caller, mod, imported_by_mod)
14 |   end
15 |
16 |   defp do_import(mod) do
17 |     quote do
18 |       import unquote(mod)
19 |     end
20 |   end
21 |
22 |   defp unimports_for_conflicts(caller, importing_module, funs) do
23 |     caller.functions
24 |     |> Keyword.merge(caller.macros, fn _k, v1, v2 -> v1 ++ v2 end)
25 |     |> Keyword.drop([Kernel, importing_module])
26 |     |> Enum.flat_map(fn {mod, imports} ->
27 |       imports
28 |       |> Enum.filter(fn fun_arity ->
29 |         fun_arity in funs
30 |       end)
31 |       |> case do
32 |         [] ->
33 |           []
34 |
35 |         unimports ->
36 |           [{mod, unimports}]
37 |       end
38 |     end)
39 |     |> Enum.map(fn {mod, unimports} ->
40 |       quote do
41 |         import unquote(mod), except: unquote(unimports)
42 |       end
43 |     end)
44 |   end
45 | end
46 |
--------------------------------------------------------------------------------
/lib/spark/dsl/extension/section_option.ex:
--------------------------------------------------------------------------------
1 | defmodule Spark.Dsl.Extension.SectionOption do
2 |   @moduledoc false
3 |
4 |   def value_and_function(value, field, type, caller, section_modules, section_no_depend_modules) do
5 |     value =
6 |       case type do
7 |         :quoted ->
8 |           Macro.escape(value)
9 |
10 |         _ ->
11 |           value
12 |       end
13 |
14 |     value =
15 |       cond do
16 |         field in section_modules ->
17 |           Spark.Dsl.Extension.expand_alias(value, caller)
18 |
19 |         field in section_no_depend_modules ->
20 |           Spark.Dsl.Extension.expand_alias_no_require(value, caller)
21 |
22 |         true ->
23 |           value
24 |       end
25 |
26 |     Spark.CodeHelpers.lift_functions(value, field, caller)
27 |   end
28 |
29 |   def set_section_option(module, extension, section_path, field, value) do
30 |     current_sections = Process.get({module, :spark_sections}, [])
31 |
32 |     unless {extension, section_path} in current_sections do
33 |       Process.put({module, :spark_sections}, [
34 |         {extension, section_path} | current_sections
35 |       ])
36 |     end
37 |
38 |     current_config =
39 |       Process.get(
40 |         {module, :spark, section_path},
41 |         %{entities: [], opts: []}
42 |       )
43 |
44 |     Process.put(
45 |       {module, :spark, section_path},
46 |       %{
47 |         current_config
48 |         | opts: Keyword.put(current_config.opts, field, value)
49 |       }
50 |     )
51 |   end
52 | end
53 |
--------------------------------------------------------------------------------
/lib/spark/dsl/fragment.ex:
--------------------------------------------------------------------------------
1 | defmodule Spark.Dsl.Fragment do
2 |   @moduledoc """
3 |   Allows splitting up a DSL into multiple modules, potentially organizing large DSLs.
4 |
5 |   Use the `of` option to express what your fragment is a fragment of. You can add
6 |   extensions as you would normally to that resource, and they will be added to the
7 |   parent resource.
8 |
9 |       defmodule MyApp.Resource.Graphql do
10 |         use Spark.Dsl.Fragment, of: Ash.Resource, extensions: AshGraphql.Resource
11 |
12 |         graphql do
13 |           ...
14 |         end
15 |       end
16 |
17 |   Then add the fragment to the parent resource.
18 |
19 |       defmodule MyApp.Resource do
20 |         use Ash.Resource, fragments: [MyApp.Resource.Graphql], ...
21 |       end
22 |   """
23 |
24 |   defmacro __using__(opts) do
25 |     opts = Spark.Dsl.Extension.do_expand(opts, __CALLER__)
26 |     original_opts = opts
27 |     single_extension_kinds = opts[:of].single_extension_kinds()
28 |     many_extension_kinds = opts[:of].many_extension_kinds()
29 |
30 |     {opts, extensions} =
31 |       opts[:of].default_extension_kinds()
32 |       |> Enum.reduce(opts, fn {key, defaults}, opts ->
33 |         Keyword.update(opts, key, defaults, fn current_value ->
34 |           cond do
35 |             key in single_extension_kinds ->
36 |               current_value || defaults
37 |
38 |             key in many_extension_kinds || key == :extensions ->
39 |               List.wrap(current_value) ++ List.wrap(defaults)
40 |
41 |             true ->
42 |               current_value
43 |           end
44 |         end)
45 |       end)
46 |       |> Spark.Dsl.expand_modules(
47 |         [
48 |           single_extension_kinds: single_extension_kinds,
49 |           many_extension_kinds: many_extension_kinds
50 |         ],
51 |         __CALLER__
52 |       )
53 |
54 |     extensions =
55 |       extensions
56 |       |> Enum.flat_map(&[&1 | &1.add_extensions()])
57 |       |> Enum.uniq()
58 |
59 |     Module.register_attribute(__CALLER__.module, :spark_extension_kinds, persist: true)
60 |     Module.register_attribute(__CALLER__.module, :spark_fragment_of, persist: true)
61 |
62 |     Module.put_attribute(__CALLER__.module, :spark_fragment_of, opts[:of])
63 |     Module.put_attribute(__CALLER__.module, :extensions, extensions)
64 |     Module.put_attribute(__CALLER__.module, :original_opts, original_opts)
65 |
66 |     Module.put_attribute(
67 |       __CALLER__.module,
68 |       :spark_extension_kinds,
69 |       List.wrap(many_extension_kinds) ++
70 |         List.wrap(single_extension_kinds)
71 |     )
72 |
73 |     quote do
74 |       require unquote(opts[:of])
75 |       unquote(Spark.Dsl.Extension.prepare(extensions))
76 |       @before_compile Spark.Dsl.Fragment
77 |     end
78 |   end
79 |
80 |   defmacro __before_compile__(_) do
81 |     quote do
82 |       Spark.Dsl.Extension.set_state([], [], false)
83 |
84 |       def extensions do
85 |         @extensions
86 |       end
87 |
88 |       def opts do
89 |         @original_opts
90 |       end
91 |
92 |       def spark_dsl_config do
93 |         @spark_dsl_config
94 |       end
95 |
96 |       def validate_sections do
97 |         List.wrap(@validate_sections)
98 |       end
99 |
100 |       @persisted @spark_dsl_config[:persist]
101 |
102 |       def persisted do
103 |         @persisted
104 |       end
105 |     end
106 |   end
107 | end
108 |
--------------------------------------------------------------------------------
/lib/spark/dsl/patch/add_entity.ex:
--------------------------------------------------------------------------------
1 | defmodule Spark.Dsl.Patch.AddEntity do
2 |   @moduledoc """
3 |   Supply this when defining an extension to add entity builders to another extension's section.
4 |
5 |   For example:
6 |
7 |   ```elixir
8 |   @entity %Spark.Dsl.Entity{
9 |     ...
10 |   }
11 |
12 |   @dsl_patch %Spark.Dsl.Patch.AddEntity{section_path: [:foo, :bar], entity: @entity}
13 |
14 |   use Spark.Dsl.Extension, dsl_patches: [@dsl_patch]
15 |   ```
16 |   """
17 |   @type t :: %__MODULE__{
18 |           section_path: list(atom),
19 |           entity: Spark.Dsl.Entity.t()
20 |         }
21 |
22 |   defstruct [:section_path, :entity]
23 | end
24 |
--------------------------------------------------------------------------------
/lib/spark/dsl/section.ex:
--------------------------------------------------------------------------------
1 | defmodule Spark.Dsl.Section do
2 |   @moduledoc """
3 |   Declares a DSL section.
4 |
5 |   A DSL section allows you to organize related configurations. All extensions
6 |   configure sections; they cannot add DSL builders at the top level. This
7 |   keeps things organized and concerns separated.
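
  For example, a minimal section (hypothetical names; the available options are described below) could be declared like this:

  ```elixir
  @my_section %Spark.Dsl.Section{
    name: :my_section,
    schema: [my_field: [type: :atom, required: true]]
  }
  ```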
8 |
9 |   A section may have nested sections, which will be configured the same as other sections.
10 |   Getting the options/entities of a section is done by providing a path, so you would
11 |   use the nested path to retrieve that configuration. See `Spark.Dsl.Extension.get_entities/2`
12 |   and `Spark.Dsl.Extension.get_opt/4`.
13 |
14 |   A section may have entities, which are constructors that produce instances of structs.
15 |   For more on entities, see `Spark.Dsl.Entity`.
16 |
17 |   A section may also have a `schema`, which you can learn more about in `Spark.Options`. Spark will produce
18 |   builders for those options, so that they may be configured. They are retrieved with
19 |   `Spark.Dsl.Extension.get_opt/4`.
20 |
21 |   To create a section that is available at the top level (i.e. not nested inside of its own name), use
22 |   `top_level?: true`. Remember, however, that this has no effect on sections nested inside of other sections.
23 |
24 |   For a full example, see `Spark.Dsl.Extension`.
25 |   """
26 |   defstruct [
27 |     :name,
28 |     imports: [],
29 |     schema: [],
30 |     describe: "",
31 |     snippet: "",
32 |     links: nil,
33 |     examples: [],
34 |     modules: [],
35 |     top_level?: false,
36 |     no_depend_modules: [],
37 |     auto_set_fields: [],
38 |     deprecations: [],
39 |     entities: [],
40 |     sections: [],
41 |     docs: "",
42 |     patchable?: false
43 |   ]
44 |
45 |   alias Spark.{
46 |     Dsl.Entity,
47 |     Dsl.Section
48 |   }
49 |
50 |   @type name :: atom()
51 |
52 |   @type imports :: [module]
53 |
54 |   @typedoc """
55 |   User-provided documentation.
56 |
57 |   Documentation provided in a `Section`'s `describe` field will be included by `Spark` in any generated documentation that includes the `Section`.
58 |   """
59 |   @type describe :: String.t()
60 |
61 |   @type snippet :: String.t()
62 |   @typedoc """
63 |   Determines whether a section can be declared directly in a module.
64 |
65 |   When `top_level?: true`, that Section's DSL can be declared outside of a `do` block in a module.
66 |
67 |   ## Example
68 |
69 |   A `Section` declared with `top_level?: true`:
70 |
71 |   ```elixir
72 |   @my_section %Spark.Dsl.Section{
73 |     top_level?: true,
74 |     name: :my_section,
75 |     schema: [my_field: [type: :atom, required: true]]
76 |   }
77 |   ```
78 |
79 |   Can be declared like this:
80 |
81 |   ```elixir
82 |   defmodule MyDslModule do
83 |     my_field :value
84 |   end
85 |   ```
86 |
87 |   With `top_level?: false`, the DSL section would need to be declared explicitly:
88 |
89 |   ```elixir
90 |   defmodule MyDslModule do
91 |     my_section do
92 |       my_field :value
93 |     end
94 |   end
95 |   ```
96 |   """
97 |   @type top_level?() :: boolean()
98 |
99 |   @type links :: nil | keyword([String.t()])
100 |
101 |   @type examples() :: [String.t()]
102 |
103 |   @type modules :: [atom]
104 |
105 |   @type no_depend_modules() :: [atom]
106 |
107 |   @type auto_set_fields() :: keyword(any)
108 |
109 |   @type entities :: [Entity.t()]
110 |
111 |   @type sections :: [Section.t()]
112 |
113 |   @typedoc """
114 |   Internal field. Not set by user.
115 |   """
116 |   @type docs :: String.t()
117 |
118 |   @type patchable? :: boolean()
119 |
120 |   @type t :: %Section{
121 |           name: name(),
122 |           imports: imports(),
123 |           schema: Spark.Options.schema(),
124 |           describe: describe(),
125 |           snippet: snippet(),
126 |           top_level?: top_level?(),
127 |           links: links(),
128 |           examples: examples(),
129 |           modules: modules(),
130 |           no_depend_modules: no_depend_modules(),
131 |           auto_set_fields: auto_set_fields(),
132 |           entities: entities(),
133 |           sections: sections(),
134 |           docs: docs(),
135 |           patchable?: patchable?()
136 |         }
137 | end
138 |
--------------------------------------------------------------------------------
/lib/spark/dsl/transformer.ex:
--------------------------------------------------------------------------------
1 | defmodule Spark.Dsl.Transformer do
2 |   @moduledoc """
3 |   A transformer manipulates and/or validates the entire DSL state of a resource.
4 |
5 |   Its `transform/1` callback takes a `map`, which is just the values/configurations at each point
6 |   of the DSL. Don't manipulate it directly, if possible; instead, use functions like
7 |   `get_entities/3` and `replace_entity/4` to manipulate it.
8 |
9 |   Use the `after?/1` and `before?/1` callbacks to ensure that your transformer
10 |   runs either before or after some other transformer.
11 |
12 |   Return `true` in `after_compile?/0` to have the transformer run in an `after_compile` hook,
13 |   but keep in mind that no modifications to the dsl structure will be retained, so there is no
14 |   real point in modifying the dsl that you return.
15 |   """
16 |   @callback transform(map) ::
17 |               :ok
18 |               | {:ok, map}
19 |               | {:error, term}
20 |               | {:warn, map, String.t() | list(String.t())}
21 |               | :halt
22 |   @callback before?(module) :: boolean
23 |   @callback after?(module) :: boolean
24 |   @callback after_compile?() :: boolean
25 |
26 |   defmacro __using__(_) do
27 |     quote generated: true do
28 |       @behaviour Spark.Dsl.Transformer
29 |
30 |       def before?(_), do: false
31 |       def after?(_), do: false
32 |       def after_compile?, do: false
33 |
34 |       defoverridable before?: 1, after?: 1, after_compile?: 0
35 |     end
36 |   end
37 |
38 |   @doc """
39 |   Saves a value into the dsl config with the given key.
40 |
41 |   This can be used to precompute some information and cache it onto the resource,
42 |   or simply store a computed value. It can later be retrieved with `Spark.Dsl.Extension.get_persisted/3`.
43 |   """
44 |   def persist(dsl, key, value) do
45 |     Map.update(dsl, :persist, %{key => value}, &Map.put(&1, key, value))
46 |   end
47 |
48 |   @doc """
49 |   Runs the function in an async compiler.
50 |
51 |   Use this for compiling new modules and having them compiled
52 |   efficiently asynchronously.
53 |   """
54 |   def async_compile(dsl, fun) do
55 |     task = Spark.Dsl.Extension.do_async_compile(fun)
56 |
57 |     tasks = get_persisted(dsl, :spark_compile_tasks, [])
58 |     persist(dsl, :spark_compile_tasks, [task | tasks])
59 |   end
60 |
61 |   @doc """
62 |   Add a quoted expression to be evaluated in the DSL module's context.
63 |
64 |   Use this *extremely sparingly*. It should almost never be necessary, unless building certain
65 |   extensions that *require* the module in question to define a given function.
66 |
67 |   What you likely want is either one of the DSL introspection functions, like `Spark.Dsl.Extension.get_entities/2`
68 |   or `Spark.Dsl.Extension.get_opt/5`. If you simply want to store a custom value that can be retrieved easily, or
69 |   cache some precomputed information onto the resource, use `persist/3`.
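
  For example, a transformer could cache a precomputed value like this (a sketch; the key and
  helper function are hypothetical):

  ```elixir
  def transform(dsl_state) do
    # Stores a value under :computed_default for later retrieval with get_persisted/3
    {:ok, Spark.Dsl.Transformer.persist(dsl_state, :computed_default, compute_default(dsl_state))}
  end
  ```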
70 | 71 | Provide the dsl state, bindings that should be unquote-able, and the quoted block 72 | to evaluate in the module. For example, if we wanted to support a `resource.primary_key()` function 73 | that would return the primary key (this is unnecessary, just an example), we might do this: 74 | 75 | ```elixir 76 | fields = the_primary_key_fields 77 | 78 | dsl_state = 79 | Spark.Dsl.Transformer.eval( 80 | dsl_state, 81 | [fields: fields], 82 | quote do 83 | def primary_key() do 84 | unquote(fields) 85 | end 86 | end 87 | ) 88 | ``` 89 | """ 90 | def eval(dsl, bindings, block) do 91 | to_eval = {block, bindings} 92 | 93 | Map.update( 94 | dsl, 95 | :eval, 96 | [to_eval], 97 | &[to_eval | &1] 98 | ) 99 | end 100 | 101 | def get_persisted(dsl, key, default \\ nil) do 102 | dsl 103 | |> Map.get(:persist, %{}) 104 | |> Map.get(key, default) 105 | end 106 | 107 | def fetch_persisted(dsl, key) do 108 | dsl 109 | |> Map.get(:persist, %{}) 110 | |> Map.fetch(key) 111 | end 112 | 113 | def build_entity!(extension, path, name, opts) do 114 | case build_entity(extension, path, name, opts) do 115 | {:ok, entity} -> 116 | entity 117 | 118 | {:error, error} -> 119 | if is_exception(error) do 120 | raise error 121 | else 122 | raise "Error building entity #{inspect(error)}" 123 | end 124 | end 125 | end 126 | 127 | def build_entity(extension, path, name, opts) do 128 | do_build_entity(extension.sections(), path, name, opts) 129 | end 130 | 131 | defp do_build_entity(sections, [section_name], name, opts) do 132 | section = Enum.find(sections, &(&1.name == section_name)) 133 | entity = Enum.find(section.entities, &(&1.name == name)) 134 | 135 | do_build(entity, opts) 136 | end 137 | 138 | defp do_build_entity( 139 | sections, 140 | [section_name, maybe_entity_name], 141 | maybe_nested_entity_name, 142 | opts 143 | ) do 144 | section = Enum.find(sections, &(&1.name == section_name)) 145 | 146 | entity = 147 | if section do 148 | Enum.find(section.entities, &(&1.name == maybe_entity_name)) 149 | end 150 | 151 | sub_entity = 152 | if entity do 153 | entity.entities 154 | |> Keyword.values() 155 | |> List.flatten() 156 | |> Enum.find(&(&1.name == maybe_nested_entity_name)) 157 | end 158 | 159 | if sub_entity do 160 | do_build(sub_entity, opts) 161 | else 162 | do_build_entity(section.sections, [maybe_entity_name], maybe_nested_entity_name, opts) 163 | end 164 | end 165 | 166 | defp do_build_entity(sections, [section_name | rest], name, opts) do 167 | section = Enum.find(sections, &(&1.name == section_name)) 168 | do_build_entity(section.sections, rest, name, opts) 169 | end 170 | 171 | defp do_build(entity, opts) do 172 | entity_names = 173 | entity.entities 174 | |> Kernel.||([]) 175 | |> Keyword.keys() 176 | 177 | {entities, opts} = Keyword.split(opts, entity_names) 178 | 179 | {before_validate_auto, after_validate_auto} = 180 | Keyword.split(entity.auto_set_fields || [], Keyword.keys(entity.schema)) 181 | 182 | with {:ok, opts} <- 183 | Spark.Options.validate( 184 | Keyword.merge(opts, before_validate_auto), 185 | entity.schema 186 | ), 187 | opts <- Keyword.merge(opts, after_validate_auto) do 188 | result = struct(struct(entity.target, opts), entities) 189 | 190 | case Spark.Dsl.Entity.transform(entity.transform, result) do 191 | {:ok, built} -> 192 | Spark.Dsl.Entity.maybe_apply_identifier(built, entity.identifier) 193 | 194 | other -> 195 | other 196 | end 197 | else 198 | {:error, error} -> 199 | {:error, error} 200 | end 201 | end 202 | 203 | def add_entity(dsl_state, path, entity, opts \\ []) do 204 
| Map.update(dsl_state, path, %{entities: [entity], opts: []}, fn config -> 205 | Map.update(config, :entities, [entity], fn entities -> 206 | if (opts[:type] || :prepend) == :prepend do 207 | [entity | entities] 208 | else 209 | entities ++ [entity] 210 | end 211 | end) 212 | end) 213 | end 214 | 215 | def remove_entity(dsl_state, path, func) do 216 | Map.update(dsl_state, path, %{entities: [], opts: []}, fn config -> 217 | Map.update(config, :entities, [], fn entities -> 218 | Enum.reject(entities, func) 219 | end) 220 | end) 221 | end 222 | 223 | def get_entities(dsl_state, path) do 224 | dsl_state 225 | |> Map.get(path, %{entities: []}) 226 | |> Map.get(:entities, []) 227 | end 228 | 229 | def fetch_option(dsl_state, path, option) do 230 | dsl_state 231 | |> Map.get(path, %{opts: []}) 232 | |> Map.get(:opts) 233 | |> Kernel.||([]) 234 | |> Keyword.fetch(option) 235 | end 236 | 237 | def get_option(dsl_state, path, option, default \\ nil) do 238 | dsl_state 239 | |> Map.get(path, %{opts: []}) 240 | |> Map.get(:opts) 241 | |> Kernel.||([]) 242 | |> Keyword.get(option, default) 243 | end 244 | 245 | def set_option(dsl_state, path, option, value) do 246 | dsl_state 247 | |> Map.put_new(path, %{opts: []}) 248 | |> Map.update!(path, fn existing_opts -> 249 | existing_opts 250 | |> Map.put_new(:opts, []) 251 | |> Map.update!(:opts, fn opts -> 252 | Keyword.put(opts, option, value) 253 | end) 254 | end) 255 | end 256 | 257 | def replace_entity(dsl_state, path, replacement, matcher \\ nil) do 258 | matcher = 259 | matcher || 260 | fn record -> 261 | record.__struct__ == replacement.__struct__ and 262 | record.__identifier__ == replacement.__identifier__ 263 | end 264 | 265 | Map.replace_lazy(dsl_state, path, fn config -> 266 | Map.replace_lazy(config, :entities, fn entities -> 267 | replace_match(entities, replacement, matcher) 268 | end) 269 | end) 270 | end 271 | 272 | defp replace_match(entities, replacement, matcher) do 273 | Enum.map(entities, fn entity -> 274 | if matcher.(entity) do 275 | replacement 276 | else 277 | entity 278 | end 279 | end) 280 | end 281 | 282 | def sort(transformers) do 283 | digraph = :digraph.new() 284 | 285 | transformers 286 | |> Enum.each(fn transformer -> 287 | :digraph.add_vertex(digraph, transformer) 288 | end) 289 | 290 | transformers 291 | |> Enum.each(fn left -> 292 | transformers 293 | |> Enum.each(fn right -> 294 | if left != right do 295 | left_before_right? = left.before?(right) || right.after?(left) 296 | left_after_right? = left.after?(right) || right.before?(left) 297 | 298 | cond do 299 | # This is annoying, but some modules have `def after?(_), do: true` 300 | # The idea being that they'd like to go after everything that isn't 301 | # explicitly after it. Same with `def before?(_), do: true` 302 | left_before_right? && left_after_right? -> 303 | :ok 304 | 305 | left_before_right? -> 306 | :digraph.add_edge(digraph, left, right) 307 | 308 | left_after_right? 
-> 309 | :digraph.add_edge(digraph, right, left) 310 | 311 | true -> 312 | :ok 313 | end 314 | end 315 | end) 316 | end) 317 | 318 | transformers = walk_rest(digraph) 319 | :digraph.delete(digraph) 320 | 321 | transformers 322 | end 323 | 324 | defp walk_rest(digraph, acc \\ []) do 325 | case :digraph.vertices(digraph) do 326 | [] -> 327 | Enum.reverse(acc) 328 | 329 | vertices -> 330 | case Enum.find(vertices, &(:digraph.in_neighbours(digraph, &1) == [])) do 331 | nil -> 332 | case Enum.find(vertices, &(:digraph.out_neighbours(digraph, &1) == [])) do 333 | nil -> 334 | raise "Cycle detected in transformer order" 335 | 336 | vertex -> 337 | :digraph.del_vertex(digraph, vertex) 338 | walk_rest(digraph, acc ++ [vertex]) 339 | end 340 | 341 | vertex -> 342 | :digraph.del_vertex(digraph, vertex) 343 | walk_rest(digraph, [vertex | acc]) 344 | end 345 | end 346 | end 347 | end 348 | -------------------------------------------------------------------------------- /lib/spark/dsl/verifier.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Dsl.Verifier do 2 | @moduledoc """ 3 | A verifier gets the dsl state and can return `:ok` or `:error`. 4 | 5 | In a verifier, you can reference and depend on other modules without causing compile time dependencies. 6 | """ 7 | @callback verify(map) :: 8 | :ok 9 | | {:error, term} 10 | | {:warn, String.t() | list(String.t())} 11 | 12 | defmacro __using__(_) do 13 | quote generated: true do 14 | @behaviour Spark.Dsl.Verifier 15 | end 16 | end 17 | 18 | defdelegate get_persisted(dsl, key), to: Spark.Dsl.Transformer 19 | defdelegate get_persisted(dsl, key, default), to: Spark.Dsl.Transformer 20 | defdelegate get_option(dsl_state, path, option), to: Spark.Dsl.Transformer 21 | defdelegate get_option(dsl_state, path, option, default), to: Spark.Dsl.Transformer 22 | defdelegate fetch_option(dsl_state, path, option), to: Spark.Dsl.Transformer 23 | defdelegate get_entities(dsl_state, path), to: Spark.Dsl.Transformer 24 | end 25 | -------------------------------------------------------------------------------- /lib/spark/dsl/verifiers/verify_entity_uniqueness.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Dsl.Verifiers.VerifyEntityUniqueness do 2 | @moduledoc """ 3 | Verifies that each entity that has an identifier is unique at each path. 
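
  For example, if an entity is declared with `identifier: :name`, then two instances of that
  entity with the same `name` at the same path will raise a `Spark.Error.DslError` at compile time.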
4 | """ 5 | 6 | use Spark.Dsl.Verifier 7 | 8 | alias Spark.Dsl.Verifier 9 | 10 | def verify(dsl_state) do 11 | module = Verifier.get_persisted(dsl_state, :module) 12 | 13 | dsl_state 14 | |> Verifier.get_persisted(:extensions) 15 | |> Enum.each(fn extension -> 16 | Enum.each(extension.sections(), fn section -> 17 | verify_entity_uniqueness(module, section, dsl_state) 18 | end) 19 | end) 20 | 21 | :ok 22 | end 23 | 24 | defp verify_entity_uniqueness(module, section, dsl_state, path \\ []) do 25 | section_path = path ++ [section.name] 26 | 27 | section.entities 28 | |> Enum.each(fn entity -> 29 | do_verify_entity_uniqueness(module, entity, section_path, dsl_state) 30 | end) 31 | 32 | Enum.each(section.sections, fn section -> 33 | verify_entity_uniqueness(module, section, dsl_state, section_path) 34 | end) 35 | 36 | section.entities 37 | |> Enum.each(fn entity -> 38 | entities_to_check = Verifier.get_entities(dsl_state, section_path) 39 | 40 | entity.entities 41 | |> Enum.flat_map(fn {key, nested_entities} -> 42 | Enum.map(nested_entities, &{key, &1}) 43 | end) 44 | |> Enum.each(fn {key, nested_entity} -> 45 | verify_nested_entity_uniqueness( 46 | module, 47 | nested_entity, 48 | section_path, 49 | entities_to_check, 50 | [key] 51 | ) 52 | end) 53 | end) 54 | end 55 | 56 | defp verify_nested_entity_uniqueness( 57 | module, 58 | nested_entity, 59 | section_path, 60 | entities_to_check, 61 | nested_entity_path 62 | ) do 63 | unique_entities_or_error( 64 | entities_to_check, 65 | nested_entity.identifier, 66 | module, 67 | section_path ++ nested_entity_path 68 | ) 69 | 70 | entities_to_check 71 | |> Enum.each(fn entity_to_check -> 72 | nested_entity.entities 73 | |> Enum.flat_map(fn {key, nested_entities} -> 74 | Enum.map(nested_entities, &{key, &1}) 75 | end) 76 | |> Enum.filter(fn {_, nested_entity} -> 77 | nested_entity.identifier 78 | end) 79 | |> Enum.each(fn {key, nested_entity} -> 80 | nested_entities_to_check = 81 | entity_to_check 82 | |> Map.get(key) 83 | |> List.wrap() 84 | 85 | verify_nested_entity_uniqueness( 86 | module, 87 | nested_entity, 88 | section_path, 89 | nested_entities_to_check, 90 | nested_entity_path ++ [key] 91 | ) 92 | end) 93 | end) 94 | end 95 | 96 | defp do_verify_entity_uniqueness(module, entity, section_path, dsl_state) do 97 | dsl_state 98 | |> Verifier.get_entities(section_path) 99 | |> Enum.filter(&(&1.__struct__ == entity.target)) 100 | |> unique_entities_or_error(entity.identifier, module, section_path) 101 | end 102 | 103 | defp unique_entities_or_error(_, nil, _, _), do: :ok 104 | 105 | defp unique_entities_or_error(entities_to_check, identifier, module, path) do 106 | entities_to_check 107 | |> Enum.frequencies_by(&{get_identifier(&1, identifier), &1.__struct__}) 108 | |> Enum.find_value(fn {key, value} -> 109 | if value > 1 do 110 | key 111 | end 112 | end) 113 | |> case do 114 | nil -> 115 | :ok 116 | 117 | {identifier, target} -> 118 | raise Spark.Error.DslError, 119 | module: module, 120 | path: path ++ [identifier], 121 | message: """ 122 | Got duplicate #{inspect(target)}: #{identifier} 123 | """ 124 | end 125 | end 126 | 127 | defp get_identifier(record, {:auto, _}), do: record.__identifier__ 128 | defp get_identifier(record, identifier), do: Map.get(record, identifier) 129 | end 130 | -------------------------------------------------------------------------------- /lib/spark/elixir_sense/aliases.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.ElixirSense.Types do 2 | @moduledoc false 3 | 4 
| alias ElixirSense.Core.Introspection
5 |
6 |   cond do
7 |     Code.ensure_loaded?(ElixirSense.Providers.Plugins.Util) ->
8 |       @util ElixirSense.Providers.Plugins.Util
9 |
10 |     Code.ensure_loaded?(ElixirLS.LanguageServer.Plugins.Util) ->
11 |       @util ElixirLS.LanguageServer.Plugins.Util
12 |
13 |     true ->
14 |       @util ElixirSense.Plugins.Util
15 |   end
16 |
17 |   if Code.ensure_loaded?(ElixirLS.Utils.Matcher) do
18 |     @matcher ElixirLS.Utils.Matcher
19 |   else
20 |     @matcher ElixirSense.Providers.Suggestion.Matcher
21 |   end
22 |
23 |   def find_builtin_types(module, func, hint, cursor_context, templates \\ []) do
24 |     text_before = cursor_context.text_before
25 |     text_after = cursor_context.text_after
26 |
27 |     actual_hint =
28 |       if String.ends_with?(text_before, "{" <> hint) do
29 |         "{" <> hint
30 |       else
31 |         hint
32 |       end
33 |
34 |     with true <- Code.ensure_loaded?(module),
35 |          true <- :erlang.function_exported(module, func, 0) do
36 |       for {name, _, _} = type <- builtin_types(module, func, templates),
37 |           apply(@matcher, :match?, [name, actual_hint]) do
38 |         builtin_type_to_suggestion(type, module, actual_hint, text_after)
39 |       end
40 |     end
41 |   end
42 |
43 |   defp builtin_types(module, func, templates) do
44 |     module
45 |     |> apply(func, [])
46 |     |> Enum.map(fn {name, value} ->
47 |       {inspect(name), value, nil}
48 |     end)
49 |     |> Enum.concat(templates)
50 |   end
51 |
52 |   def find_custom_types(type_module, func, hint, module_store) do
53 |     builtin_types = Keyword.values(apply(type_module, func, []))
54 |
55 |     for module <- module_store.by_behaviour[type_module] || [],
56 |         module not in builtin_types,
57 |         type_str = inspect(module),
58 |         apply(@util, :match_module?, [type_str, hint]) do
59 |       custom_type_to_suggestion(module, type_module, hint)
60 |     end
61 |   end
62 |
63 |   defp builtin_type_to_suggestion({type, mod_or_description, snippet}, module, hint, text_after) do
64 |     [_, hint_prefix] = Regex.run(~r/(.*?)[\w0-9\._!\?\->]*$/, hint)
65 |
66 |     insert_text = String.replace_prefix(type, hint_prefix, "")
67 |     snippet = snippet && String.replace_prefix(snippet, hint_prefix, "")
68 |
69 |     {insert_text, snippet} =
70 |       if String.starts_with?(text_after, "}") do
71 |         snippet = snippet && String.replace_suffix(snippet, "}", "")
72 |         insert_text = String.replace_suffix(insert_text, "}", "")
73 |         {insert_text, snippet}
74 |       else
75 |         {insert_text, snippet}
76 |       end
77 |
78 |     docs =
79 |       if is_binary(mod_or_description) do
80 |         mod_or_description
81 |       else
82 |         {doc, _} = apply(Introspection, :get_module_docs_summary, [mod_or_description])
83 |         doc
84 |       end
85 |
86 |     doc = """
87 |     Built-in #{inspect(module)}
88 |
89 |     #{docs}
90 |     """
91 |
92 |     %{
93 |       type: :generic,
94 |       kind: :type_parameter,
95 |       label: type,
96 |       insert_text: insert_text,
97 |       snippet: snippet,
98 |       detail: inspect(module),
99 |       documentation: doc,
100 |       priority: 0
101 |     }
102 |   end
103 |
104 |   defp custom_type_to_suggestion(type, module, hint) do
105 |     type_str = inspect(type)
106 |     {doc, _} = apply(Introspection, :get_module_docs_summary, [type])
107 |
108 |     %{
109 |       type: :generic,
110 |       kind: :type_parameter,
111 |       label: type_str,
112 |       insert_text: apply(@util, :trim_leading_for_insertion, [hint, type_str]),
113 |       detail: "Custom #{inspect(module)}",
114 |       documentation: doc,
115 |       priority: 1
116 |     }
117 |   end
118 | end
119 |
--------------------------------------------------------------------------------
/lib/spark/elixir_sense/entity.ex:
--------------------------------------------------------------------------------
1 | defmodule
Spark.ElixirSense.Entity do 2 | @moduledoc false 3 | alias ElixirSense.Core.Introspection 4 | alias ElixirSense.Providers.Suggestion.Complete 5 | 6 | cond do 7 | Code.ensure_loaded?(ElixirSense.Providers.Plugins.Util) -> 8 | @util ElixirSense.Providers.Plugins.Util 9 | 10 | Code.ensure_loaded?(ElixirLS.LanguageServer.Plugins.Util) -> 11 | @util ElixirLS.LanguageServer.Plugins.Util 12 | 13 | true -> 14 | @util ElixirSense.Plugins.Util 15 | end 16 | 17 | def find_entities(type, hint) do 18 | for {module, _} <- :code.all_loaded(), 19 | type in (module.module_info(:attributes)[:spark_is] || []), 20 | mod_str = inspect(module), 21 | apply(@util, :match_module?, [mod_str, hint]) do 22 | {doc, _} = apply(Introspection, :get_module_docs_summary, [module]) 23 | 24 | %{ 25 | type: :generic, 26 | kind: :class, 27 | label: mod_str, 28 | insert_text: apply(@util, :trim_leading_for_insertion, [hint, mod_str]), 29 | detail: "Spark resource", 30 | documentation: doc 31 | } 32 | end 33 | end 34 | 35 | def find_spark_behaviour_impls(behaviour, builtins, hint, module_store) do 36 | builtins = 37 | if builtins && !String.contains?(hint, ".") && lowercase_string?(hint) do 38 | cond do 39 | Code.ensure_loaded?(ElixirSense.Providers.Completion.CompletionEngine) -> 40 | apply(ElixirSense.Providers.Completion.CompletionEngine, :complete, [ 41 | to_string("#{inspect(builtins)}.#{hint}"), 42 | apply(ElixirSense.Core.State.Env, :__struct__, []), 43 | apply(ElixirSense.Core.Metadata, :__struct__, []), 44 | 0 45 | ]) 46 | 47 | Code.ensure_loaded?(ElixirLS.Utils.CompletionEngine) -> 48 | apply(ElixirLS.Utils.CompletionEngine, :complete, [ 49 | to_string("#{inspect(builtins)}.#{hint}"), 50 | apply(ElixirSense.Core.State.Env, :__struct__, []), 51 | apply(ElixirSense.Core.Metadata, :__struct__, []), 52 | 0 53 | ]) 54 | 55 | function_exported?(Complete, :complete, 4) -> 56 | apply(Complete, :complete, [ 57 | to_string("#{inspect(builtins)}.#{hint}"), 58 | apply(ElixirSense.Core.State.Env, :__struct__, []), 59 | apply(ElixirSense.Core.Metadata, :__struct__, []), 60 | 0 61 | ]) 62 | 63 | true -> 64 | apply(Complete, :complete, [ 65 | to_string("#{inspect(builtins)}.#{hint}"), 66 | apply(Complete.Env, :__struct__, []) 67 | ]) 68 | end 69 | else 70 | [] 71 | end 72 | |> Enum.reject(fn 73 | %{name: name} when name in ["__info__", "module_info", "module_info"] -> 74 | true 75 | 76 | _ -> 77 | false 78 | end) 79 | |> Enum.map(fn 80 | %{type: :function, origin: origin, name: name, arity: arity} = completion -> 81 | try do 82 | {:docs_v1, _, _, _, _, _, functions} = Code.fetch_docs(Module.concat([origin])) 83 | 84 | new_summary = 85 | Enum.find_value(functions, fn 86 | {{:function, func_name, func_arity}, _, _, 87 | %{ 88 | "en" => docs 89 | }, _} -> 90 | if to_string(func_name) == name && func_arity == arity do 91 | docs 92 | end 93 | 94 | _other -> 95 | false 96 | end) 97 | 98 | %{completion | summary: new_summary || completion.summary} 99 | rescue 100 | _e -> 101 | completion 102 | end 103 | 104 | other -> 105 | other 106 | end) 107 | 108 | first = behaviour |> Module.split() |> Enum.at(0) 109 | 110 | custom = 111 | for module <- module_store.by_behaviour[behaviour] || [], 112 | mod_str = inspect(module), 113 | !String.starts_with?(mod_str, "#{first}."), 114 | apply(@util, :match_module?, [mod_str, hint]) do 115 | {doc, _} = apply(Introspection, :get_module_docs_summary, [module]) 116 | 117 | %{ 118 | type: :generic, 119 | kind: :class, 120 | label: mod_str, 121 | insert_text: apply(@util, :trim_leading_for_insertion, [hint, 
mod_str]),
122 |         detail: "#{inspect(behaviour)}",
123 |         documentation: doc
124 |       }
125 |     end
126 |
127 |     builtins ++ custom
128 |   end
129 |
130 |   def find_behaviour_impls(behaviour, hint, module_store) do
131 |     for module <- module_store.by_behaviour[behaviour] || [],
132 |         mod_str = inspect(module),
133 |         apply(@util, :match_module?, [mod_str, hint]),
134 |         !asks_to_skip?(module) do
135 |       {doc, _} = apply(Introspection, :get_module_docs_summary, [module])
136 |
137 |       %{
138 |         type: :generic,
139 |         kind: :class,
140 |         label: mod_str,
141 |         insert_text: apply(@util, :trim_leading_for_insertion, [hint, mod_str]),
142 |         detail: "#{inspect(behaviour)}",
143 |         documentation: doc
144 |       }
145 |     end
146 |   end
147 |
148 |   defp asks_to_skip?(module) do
149 |     Code.ensure_loaded?(module) && function_exported?(module, :skip_in_spark_autocomplete, 0) &&
150 |       module.skip_in_spark_autocomplete()
151 |   end
152 |
153 |   defp lowercase_string?(""), do: true
154 |
155 |   defp lowercase_string?(string) do
156 |     first = String.first(string)
157 |     String.downcase(first) == first
158 |   end
159 | end
--------------------------------------------------------------------------------
/lib/spark/error/dsl_error.ex:
--------------------------------------------------------------------------------
1 | defmodule Spark.Error.DslError do
2 |   @moduledoc "Used when a DSL is incorrectly configured."
3 |   @attrs [:module, :message, :path, :stacktrace]
4 |   defexception @attrs
5 |
6 |   @type t :: %__MODULE__{
7 |           __exception__: true,
8 |           module: nil | module,
9 |           message: String.t() | any,
10 |           path: [atom],
11 |           stacktrace: any
12 |         }
13 |
14 |   defmodule Stacktrace do
15 |     @moduledoc false
16 |     defstruct [:stacktrace]
17 |
18 |     defimpl Inspect do
19 |       def inspect(_, _) do
20 |         "%Stacktrace{}"
21 |       end
22 |     end
23 |   end
24 |
25 |   @impl true
26 |   def exception(message) when is_binary(message), do: exception(message: message)
27 |
28 |   def exception(opts) do
29 |     {:current_stacktrace, stacktrace} =
30 |       Process.info(self(), :current_stacktrace)
31 |
32 |     opts =
33 |       opts
34 |       |> Enum.to_list()
35 |       |> Keyword.put(:stacktrace, %Stacktrace{stacktrace: stacktrace})
36 |       |> Keyword.take(@attrs)
37 |
38 |     struct!(__MODULE__, opts)
39 |   end
40 |
41 |   @impl true
42 |   def message(%{module: module, message: message, path: blank})
43 |       when is_nil(blank) or blank == [] do
44 |     "#{module_line(module)}#{get_message(message)}"
45 |   end
46 |
47 |   def message(%{module: module, message: message, path: dsl_path}) do
48 |     dsl_path =
49 |       Enum.map_join(dsl_path, " -> ", fn item ->
50 |         try do
51 |           to_string(item)
52 |         rescue
53 |           _ ->
54 |             inspect(item)
55 |         end
56 |       end)
57 |
58 |     "#{module_line(module)}#{dsl_path}:\n #{get_message(message)}"
59 |   end
60 |
61 |   defp get_message(message) when is_exception(message) do
62 |     Exception.format(:error, message)
63 |   end
64 |
65 |   defp get_message(message) when is_binary(message) do
66 |     message
67 |   end
68 |
69 |   defp get_message(message) do
70 |     inspect(message)
71 |   end
72 |
73 |   defp module_line(nil), do: ""
74 |
75 |   defp module_line(module) do
76 |     "[#{normalize_module_name(module)}]\n"
77 |   end
78 |
79 |   defp normalize_module_name(module) do
80 |     inspect(module)
81 |   end
82 | end
83 |
--------------------------------------------------------------------------------
/lib/spark/formatter.ex:
--------------------------------------------------------------------------------
1 | if Code.ensure_loaded?(Sourceror) do
2 |   defmodule Spark.Formatter do
3 |     @moduledoc """
4 |     Formats Spark modules.
5 |
6 |     Currently, it is very simple, and will only reorder the outermost sections according to some rules.
7 |
8 |     # Plugin
9 |
10 |     Include the plugin in your `.formatter.exs` like so: `plugins: [Spark.Formatter]`.
11 |
12 |     If no configuration is provided, it will sort all top level DSL sections *alphabetically*.
13 |
14 |     # Section Order
15 |
16 |     To provide a custom section order, add configuration to your app, for example:
17 |
18 |     ```elixir
19 |     config :spark, :formatter,
20 |       remove_parens?: true,
21 |       "Ash.Resource": [
22 |         section_order: [
23 |           :resource,
24 |           :postgres,
25 |           :attributes,
26 |           :relationships,
27 |           :aggregates,
28 |           :calculations
29 |         ]
30 |       ],
31 |       "MyApp.Resource": [
32 |         # Use this if you use a module that is not the spark DSL itself.
33 |         # For example, you might have a "base" that you use instead that sets some simple defaults.
34 |
35 |         # This tells us what the actual thing is so we know what extensions are included automatically.
36 |         type: Ash.Resource,
37 |
38 |         # Tell us what extensions might be added under the hood
39 |         extensions: [MyApp.ResourceExtension],
40 |         section_order: [...]
41 |       ]
42 |     ```
43 |
44 |     Any sections found that aren't in that list will be left in the order that they were in; the sections
45 |     in the list will be sorted "around" those sections. E.g. the following list: `[:code_interface, :attributes]` can be interpreted as
46 |     "ensure that code_interface comes before attributes, and don't change the rest".
47 |     """
48 |     @behaviour Mix.Tasks.Format
49 |
50 |     require Logger
51 |
52 |     def features(_opts) do
53 |       [extensions: [".ex", ".exs"]]
54 |     end
55 |
56 |     def format(contents, opts) do
57 |       config =
58 |         :spark
59 |         |> Application.get_env(:formatter, [])
60 |         |> Enum.map(fn
61 |           {key, value} when key in [:remove_parens?]
-> 62 | {key, value} 63 | 64 | {key, value} -> 65 | {Module.concat([key]), value} 66 | end) 67 | 68 | parse_result = 69 | try do 70 | {:ok, Sourceror.parse_string!(contents)} 71 | rescue 72 | _ -> 73 | :error 74 | end 75 | 76 | case parse_result do 77 | {:ok, parsed} -> 78 | parsed 79 | |> format_resources(opts, config) 80 | |> then(fn patches -> 81 | Sourceror.patch_string(contents, patches) 82 | end) 83 | |> Code.format_string!(opts_without_plugin(opts)) 84 | |> then(fn iodata -> 85 | [iodata, ?\n] 86 | end) 87 | |> IO.iodata_to_binary() 88 | 89 | :error -> 90 | contents 91 | end 92 | end 93 | 94 | defp format_resources(parsed, opts, config) do 95 | {_, patches} = 96 | Spark.CodeHelpers.prewalk(parsed, [], false, fn 97 | {:defmodule, _, [_, [{{:__block__, _, [:do]}, {:__block__, _, body}}]]} = expr, 98 | patches, 99 | false -> 100 | case get_extensions(body, config) do 101 | {:ok, extensions, type, using} -> 102 | replacement = format_resource(body, extensions, config, type, using) 103 | 104 | patches = 105 | body 106 | |> Enum.zip(replacement) 107 | |> Enum.reduce(patches, fn {body_section, replacement_section}, patches -> 108 | if body_section == replacement_section do 109 | patches 110 | else 111 | [ 112 | %{ 113 | range: Sourceror.get_range(body_section, include_comments: true), 114 | change: Sourceror.to_string(replacement_section, opts) 115 | } 116 | | patches 117 | ] 118 | end 119 | end) 120 | 121 | {expr, patches, true} 122 | 123 | _ -> 124 | {expr, patches, true} 125 | end 126 | 127 | expr, patches, branch_acc -> 128 | {expr, patches, branch_acc} 129 | end) 130 | 131 | patches 132 | end 133 | 134 | defp format_resource(body, extensions, config, _type, using) do 135 | sections = 136 | extensions 137 | |> Enum.flat_map(fn extension -> 138 | Enum.map(extension.sections(), fn section -> 139 | {extension, section} 140 | end) 141 | end) 142 | |> sort_sections(config[using][:section_order]) 143 | 144 | section_names = Enum.map(sections, fn {_, section} -> section.name end) 145 | 146 | {section_exprs, non_section_exprs} = 147 | body 148 | |> Enum.with_index() 149 | |> Enum.split_with(fn {{name, _, _}, _index} -> 150 | name in section_names 151 | end) 152 | 153 | new_sections = 154 | if config[using][:section_order] && config[using][:section_order] != [] do 155 | Enum.sort_by(section_exprs, fn {{name, _, _}, _} -> 156 | Enum.find_index(section_names, &(&1 == name)) 157 | end) 158 | else 159 | section_exprs 160 | end 161 | 162 | new_section_indexes = 163 | section_exprs 164 | |> Enum.map(&elem(&1, 1)) 165 | |> Enum.sort() 166 | 167 | new_sections = 168 | Enum.zip_with(new_sections, new_section_indexes, fn {new_section, _}, index -> 169 | {new_section, index} 170 | end) 171 | 172 | non_section_exprs 173 | |> Enum.concat(new_sections) 174 | |> Enum.sort_by(&elem(&1, 1)) 175 | |> Enum.map(&elem(&1, 0)) 176 | |> then(fn sections -> 177 | if config[:remove_parens?] 
do 178 | de_paren(sections, Enum.flat_map(extensions, & &1.sections()), extensions) 179 | else 180 | sections 181 | end 182 | end) 183 | end 184 | 185 | defp de_paren(actual_sections, dsl_sections, extensions) do 186 | actual_sections 187 | |> Enum.map(fn 188 | {name, meta, body} -> 189 | case Enum.find(dsl_sections, &(&1.name == name)) do 190 | nil -> 191 | {name, meta, body} 192 | 193 | section -> 194 | {name, meta, de_paren_section(body, section, extensions)} 195 | end 196 | 197 | other -> 198 | other 199 | end) 200 | end 201 | 202 | defp de_paren_section(body, section, extensions) do 203 | builders = all_entity_builders([section], extensions) 204 | 205 | Macro.prewalk(body, fn 206 | {func, meta, body} = node when is_atom(func) -> 207 | count = Enum.count(List.wrap(body)) 208 | 209 | builders = Keyword.get_values(builders, func) 210 | 211 | if Enum.any?(builders, &(&1 in [count, count - 1])) && 212 | Keyword.keyword?(meta) && 213 | meta[:closing] do 214 | {func, Keyword.delete(meta, :closing), body} 215 | else 216 | node 217 | end 218 | 219 | node -> 220 | node 221 | end) 222 | end 223 | 224 | defp sort_sections(sections, nil), do: sections 225 | defp sort_sections(sections, []), do: sections 226 | 227 | defp sort_sections(sections, section_order) do 228 | {ordered, unordered} = 229 | sections 230 | |> Enum.with_index() 231 | |> Enum.split_with(fn {{_, section}, _} -> 232 | section.name in section_order 233 | end) 234 | 235 | reordered = 236 | ordered 237 | |> Enum.map(&elem(&1, 0)) 238 | |> Enum.sort_by(fn {_, section} -> 239 | Enum.find_index(section_order, &(&1 == section.name)) 240 | end) 241 | 242 | Enum.reduce(unordered, reordered, fn {{extension, section}, i}, acc -> 243 | List.insert_at(acc, i, {extension, section}) 244 | end) 245 | end 246 | 247 | defp get_extensions(body, config) do 248 | Enum.find_value(body, :error, fn 249 | {:use, _, using} -> 250 | [using, opts] = 251 | case Spark.Dsl.Extension.expand_alias(using, __ENV__) do 252 | [using] -> 253 | [using, []] 254 | 255 | [using, opts] -> 256 | [using, opts] 257 | end 258 | 259 | if Keyword.has_key?(config, using) do 260 | type = config[using][:type] || using 261 | {:ok, parse_extensions(opts, config[using], type), type, using} 262 | end 263 | 264 | _ -> 265 | nil 266 | end) 267 | end 268 | 269 | defp parse_extensions(blocks, config, type) do 270 | blocks 271 | |> Enum.flat_map(fn {{:__block__, _, _}, extensions} -> 272 | extensions 273 | |> case do 274 | {:__block__, _, [extensions]} -> 275 | extensions 276 | 277 | extension when is_atom(extension) -> 278 | extension 279 | 280 | _ -> 281 | [] 282 | end 283 | |> List.wrap() 284 | |> Enum.flat_map(fn extension -> 285 | case is_atom(extension) and Code.ensure_compiled(extension) do 286 | {:module, module} -> 287 | if Spark.implements_behaviour?(module, Spark.Dsl.Extension) do 288 | [module] 289 | else 290 | [] 291 | end 292 | 293 | _ -> 294 | [] 295 | end 296 | end) 297 | end) 298 | |> Enum.concat(config[:extensions] || []) 299 | |> Enum.concat(type.default_extensions() || []) 300 | |> Enum.flat_map(fn extension -> 301 | [extension | extension.add_extensions()] 302 | end) 303 | end 304 | 305 | defp opts_without_plugin(opts) do 306 | Keyword.update(opts, :plugins, [], &(&1 -- [__MODULE__])) 307 | end 308 | 309 | @doc false 310 | def all_entity_builders(sections, extensions, path \\ []) do 311 | Enum.flat_map(sections, fn section -> 312 | Enum.concat([ 313 | all_entity_option_builders(section), 314 | all_patch_entity_builders(extensions, section, path), 315 | 
section_option_builders(section), 316 | section_entity_builders(section, extensions, path) 317 | ]) 318 | end) 319 | |> Enum.uniq() 320 | |> Enum.sort() 321 | end 322 | 323 | defp all_patch_entity_builders(extensions, section, path) do 324 | match_path = path ++ [section.name] 325 | 326 | extensions 327 | |> Enum.flat_map(& &1.dsl_patches()) 328 | |> tap(fn patches -> 329 | Enum.map(patches, & &1.section_path) 330 | end) 331 | |> Enum.filter(fn 332 | %Spark.Dsl.Patch.AddEntity{section_path: ^match_path} -> 333 | true 334 | 335 | _ -> 336 | false 337 | end) 338 | |> Enum.map(& &1.entity) 339 | |> Enum.flat_map(&entity_option_builders/1) 340 | end 341 | 342 | defp section_entity_builders(section, extensions, path) do 343 | match_path = path ++ [section.name] 344 | 345 | mixed_in_entities = 346 | extensions 347 | |> Enum.flat_map(& &1.dsl_patches()) 348 | |> Enum.filter(fn 349 | %Spark.Dsl.Patch.AddEntity{section_path: ^match_path} -> 350 | true 351 | 352 | _ -> 353 | false 354 | end) 355 | |> Enum.map(& &1.entity) 356 | 357 | Enum.flat_map(section.entities ++ mixed_in_entities, fn entity -> 358 | entity_builders(entity) 359 | end) ++ all_entity_builders(section.sections, extensions, path ++ [section.name]) 360 | end 361 | 362 | def entity_builders(entity) do 363 | arg_count = Enum.count(entity.args) 364 | non_optional_arg_count = Enum.count(entity.args, &is_atom/1) 365 | 366 | non_optional_arg_count..arg_count 367 | |> Enum.flat_map(&[{entity.name, &1}, {entity.name, &1 + 1}]) 368 | |> Enum.concat(flat_map_nested_entities(entity, &entity_builders/1)) 369 | end 370 | 371 | defp all_entity_option_builders(section) do 372 | Enum.flat_map(section.entities, fn entity -> 373 | entity_option_builders(entity) 374 | end) 375 | end 376 | 377 | @doc false 378 | def entity_option_builders(entity) do 379 | entity_args_to_drop = Spark.Dsl.Entity.required_arg_names(entity) 380 | 381 | entity.schema 382 | |> Keyword.drop(entity_args_to_drop) 383 | |> Enum.map(fn {key, _schema} -> 384 | {key, 1} 385 | end) 386 | |> Kernel.++(flat_map_nested_entities(entity, &entity_option_builders/1)) 387 | end 388 | 389 | defp section_option_builders(section) do 390 | Enum.map(section.schema, fn {key, _} -> 391 | {key, 1} 392 | end) 393 | end 394 | 395 | defp flat_map_nested_entities(entity, mapper) do 396 | Enum.flat_map(entity.entities, fn {_, nested_entities} -> 397 | nested_entities 398 | |> List.wrap() 399 | |> Enum.flat_map(fn nested_entity -> 400 | mapper.(nested_entity) 401 | end) 402 | end) 403 | end 404 | end 405 | else 406 | defmodule Spark.Formatter do 407 | @moduledoc """ 408 | Formats Spark modules. 409 | """ 410 | @behaviour Mix.Tasks.Format 411 | 412 | require Logger 413 | 414 | def features(_opts) do 415 | [extensions: [".ex", ".exs"]] 416 | end 417 | 418 | def format(_content, _opts) do 419 | raise """ 420 | #{inspect(__MODULE__)} requires sourceror to run. 
Please add it as a dev/test dependency:
421 | 
422 | defp deps do
423 | [
424 | ...,
425 | {:sourceror, "~> 1.7", only: [:dev, :test]}
426 | ]
427 | end
428 | 
429 | """
430 | end
431 | end
432 | end
433 | 
-------------------------------------------------------------------------------- /lib/spark/options/helpers.ex: --------------------------------------------------------------------------------
1 | defmodule Spark.Options.Helpers do
2 | @moduledoc """
3 | Helpers for use with Spark options
4 | """
5 | 
6 | def make_required!(options, field) do
7 | Keyword.update!(options, field, &Keyword.put(&1, :required, true))
8 | end
9 | 
10 | def make_optional!(options, field) do
11 | Keyword.update!(options, field, &Keyword.delete(&1, :required))
12 | end
13 | 
14 | def set_type!(options, field, type) do
15 | Keyword.update!(options, field, &Keyword.put(&1, :type, type))
16 | end
17 | 
18 | def set_default!(options, field, value) do
19 | Keyword.update!(options, field, fn config ->
20 | config
21 | |> Keyword.put(:default, value)
22 | |> Keyword.delete(:required)
23 | end)
24 | end
25 | 
26 | def append_doc!(options, field, to_append) do
27 | Keyword.update!(options, field, fn opt_config ->
28 | Keyword.update(opt_config, :doc, to_append, fn existing ->
29 | existing <> " - " <> to_append
30 | end)
31 | end)
32 | end
33 | end
34 | 
-------------------------------------------------------------------------------- /lib/spark/options/validation_error.ex: --------------------------------------------------------------------------------
1 | defmodule Spark.Options.ValidationError do
2 | @moduledoc """
3 | An error that is returned (or raised) when options are invalid.
4 | 
5 | Since this is an exception, you can either raise it directly with `raise/1`
6 | or turn it into a message string with `Exception.message/1`.
7 | 
8 | See [`%Spark.Options.ValidationError{}`](`__struct__/0`) for documentation on the fields.
9 | """
10 | 
11 | @type t() :: %__MODULE__{
12 | key: atom(),
13 | keys_path: [atom()],
14 | value: term()
15 | }
16 | 
17 | @doc """
18 | The error struct.
19 | 
20 | Only the following documented fields are considered public. All other fields are
21 | considered private and should not be referenced:
22 | 
23 | * `:key` (`t:atom/0`) - The key that did not successfully validate.
24 | 
25 | * `:keys_path` (list of `t:atom/0`) - If the key is nested, this is the path to the key.
26 | 
27 | * `:value` (`t:term/0`) - The value that failed to validate. This field is `nil` if there
28 | was no value provided.
29 | 
30 | """
31 | defexception [:message, :key, :value, keys_path: []]
32 | 
33 | @impl true
34 | def message(%__MODULE__{message: message, keys_path: keys_path}) do
35 | suffix =
36 | case keys_path do
37 | [] -> ""
38 | keys -> " (in options #{inspect(keys)})"
39 | end
40 | 
41 | message <> suffix
42 | end
43 | end
44 | 
-------------------------------------------------------------------------------- /lib/spark/options/validator.ex: --------------------------------------------------------------------------------
1 | defmodule Spark.Options.Validator do
2 | @moduledoc """
3 | Defines a validator module for an option schema.
4 | 
5 | Validators create structs with keys for each option in their schema,
6 | and optimized `validate` and `validate!` functions on that struct.
7 | 
8 | ## Upgrading from options lists
9 | 
10 | You can pass the option `define_deprecated_access?: true` to `use Spark.Options.Validator`,
11 | which will make it such that `options[:foo]` will still work, but will emit a deprecation warning.
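For example, given the `MyOptions` validator defined in the example below, passing `define_deprecated_access?: true` would let `MyOptions.validate!(foo: "bar")[:foo]` keep returning `"bar"`, just with a deprecation warning attached.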
12 | This can help with smoother upgrades.
13 | 
14 | ## Example
15 | 
16 | Given a module like the following:
17 | 
18 | ```elixir
19 | defmodule MyOptions do
20 | use Spark.Options.Validator, schema: [
21 | foo: [
22 | type: :string,
23 | required: true
24 | ],
25 | bar: [
26 | type: :string
27 | ],
28 | baz: [
29 | type: :integer,
30 | default: 10
31 | ]
32 | ]
33 | end
34 | ```
35 | 
36 | You can use it like so:
37 | 
38 | ```elixir
39 | @doc \"\"\"
40 | Does a thing
41 | 
42 | ## Options
43 | 
44 | \#{MyOptions.docs()}
45 | \"""
46 | @doc spark_opts: [{1, MyOptions.schema()}]
47 | def your_function(arg, opts \\\\ []) do
48 | options = MyOptions.validate!(opts)
49 | 
50 | options.foo
51 | options.bar
52 | end
53 | ```
54 | """
55 | 
56 | defmacro __using__(opts) do
57 | schema = opts[:schema]
58 | define_deprecated_access? = opts[:define_deprecated_access?]
59 | 
60 | [
61 | quote bind_quoted: [schema: schema, define_deprecated_access?: define_deprecated_access?] do
62 | schema = Spark.Options.new!(schema).schema
63 | @schema Keyword.new(schema)
64 | 
65 | struct_fields =
66 | Keyword.new(@schema, fn {key, config} ->
67 | case Keyword.fetch(config, :default) do
68 | {:ok, default} ->
69 | case Spark.Options.validate_single_value(config[:type], key, default) do
70 | {:ok, default} ->
71 | {key, default}
72 | 
73 | {:error, error} ->
74 | raise error
75 | end
76 | 
77 | :error ->
78 | {key, nil}
79 | end
80 | end)
81 | 
82 | @defaults @schema
83 | |> Enum.filter(fn {_key, config} ->
84 | Keyword.has_key?(config, :default)
85 | end)
86 | |> Enum.map(&elem(&1, 0))
87 | 
88 | defstruct struct_fields ++ [__set__: @defaults]
89 | 
90 | schema_specs = Spark.Options.Docs.schema_specs(@schema, true)
91 | 
92 | quote do
93 | @type t :: %__MODULE__{unquote_splicing(schema_specs)}
94 | end
95 | |> Code.eval_quoted([], __ENV__)
96 | 
97 | @type options :: [unquote_splicing(schema_specs)]
98 | 
99 | # Raises if the schema itself is invalid.
100 | Spark.Options.Validator.validate_schema!(@schema)
101 | 
102 | @required @schema
103 | |> Enum.filter(fn {_key, config} ->
104 | config[:required]
105 | end)
106 | |> Enum.map(&elem(&1, 0))
107 | 
108 | @valid_options schema
109 | |> Enum.reject(fn {_key, config} ->
110 | config[:private?]
111 | end)
112 | |> Enum.map(&elem(&1, 0))
113 | 
114 | @spec schema :: Spark.Options.schema()
115 | def schema do
116 | @schema
117 | end
118 | 
119 | @spec docs(Keyword.t()) :: String.t()
120 | @spec docs() :: String.t()
121 | def docs(opts \\ []) do
122 | Spark.Options.docs(@schema |> Keyword.take(@valid_options), opts)
123 | end
124 | 
125 | if define_deprecated_access? do
126 | def fetch(%__MODULE__{} = data, key) do
127 | IO.warn(
128 | "Accessing options from #{__MODULE__} is deprecated. Use `opts.#{key}` instead."
129 | ) 130 | 131 | Map.fetch(data, key) 132 | end 133 | end 134 | 135 | @spec to_options(t(), Keyword.t() | nil) :: options() 136 | @spec to_options(t()) :: options() 137 | def to_options(self, take \\ nil) 138 | 139 | def to_options(self, nil) do 140 | Enum.reduce(self.__set__, [], fn key, acc -> 141 | [{key, Map.get(self, key)} | acc] 142 | end) 143 | end 144 | 145 | def to_options(self, take) do 146 | self 147 | |> to_options() 148 | |> Keyword.take(take) 149 | end 150 | 151 | @spec validate!(options()) :: t() | no_return 152 | def validate!(%__MODULE__{} = opts), do: opts 153 | 154 | def validate!(options) do 155 | Enum.reduce(options, {%__MODULE__{}, @required}, fn {key, value}, acc -> 156 | case validate_option(key, value, acc) do 157 | {:cont, {struct, missing}} -> 158 | {mark_set(struct, key), missing} 159 | 160 | {:halt, {:error, error}} -> 161 | raise error 162 | end 163 | end) 164 | |> case do 165 | {schema, []} -> 166 | schema 167 | 168 | {_schema, missing} -> 169 | raise %Spark.Options.ValidationError{ 170 | key: missing, 171 | message: "Missing required keys: #{inspect(missing)}" 172 | } 173 | end 174 | end 175 | 176 | @spec validate(options) :: {:ok, t()} | {:error, term()} 177 | def validate(%__MODULE__{} = opts), do: {:ok, opts} 178 | 179 | def validate(options) do 180 | Enum.reduce_while(options, {%__MODULE__{}, @required}, fn {key, value}, acc -> 181 | case validate_option(key, value, acc) do 182 | {:cont, {struct, missing}} -> {:cont, {mark_set(struct, key), missing}} 183 | {:halt, {:error, error}} -> {:halt, {:error, error}} 184 | end 185 | end) 186 | |> case do 187 | {:error, error} -> 188 | {:error, error} 189 | 190 | {schema, []} -> 191 | {:ok, schema} 192 | 193 | {_schema, missing} -> 194 | {:error, 195 | %Spark.Options.ValidationError{ 196 | key: missing, 197 | message: "Missing required keys: #{inspect(missing)}" 198 | }} 199 | end 200 | end 201 | end, 202 | quote bind_quoted: [schema: schema] do 203 | @compile {:inline, validate_option: 3, use_key: 2, mark_set: 2, warn_deprecated: 1} 204 | for {key, config} <- Keyword.new(schema) do 205 | type = Macro.escape(config[:type]) 206 | 207 | cond do 208 | config[:private?] 
->
209 | defp validate_option(unquote(key), value, {acc, required_fields}) do
210 | {:cont, {acc, required_fields}}
211 | end
212 | 
213 | # we can add as many of these as we like to front-load validations
214 | type == :integer ->
215 | defp validate_option(unquote(key), value, {acc, required_fields})
216 | when is_integer(value) do
217 | {:cont, {%{acc | unquote(key) => value}, use_key(required_fields, unquote(key))}}
218 | end
219 | 
220 | defp validate_option(unquote(key), value, _) do
221 | {:halt,
222 | {:error,
223 | %Spark.Options.ValidationError{
224 | key: unquote(key),
225 | message:
226 | "invalid value for #{Spark.Options.render_key(unquote(key))}: expected integer, got: #{inspect(value)}",
227 | value: value
228 | }}}
229 | end
230 | 
231 | type == :string ->
232 | defp validate_option(unquote(key), value, {acc, required_fields})
233 | when is_binary(value) do
234 | {:cont, {%{acc | unquote(key) => value}, use_key(required_fields, unquote(key))}}
235 | end
236 | 
237 | defp validate_option(unquote(key), value, _) do
238 | {:halt,
239 | {:error,
240 | %Spark.Options.ValidationError{
241 | key: unquote(key),
242 | message:
243 | "invalid value for #{Spark.Options.render_key(unquote(key))}: expected string, got: #{inspect(value)}",
244 | value: value
245 | }}}
246 | end
247 | 
248 | true ->
249 | defp validate_option(unquote(key), value, {acc, required_fields}) do
250 | case Spark.Options.validate_single_value(unquote(type), unquote(key), value) do
251 | {:ok, value} ->
252 | {:cont,
253 | {%{acc | unquote(key) => value}, use_key(required_fields, unquote(key))}}
254 | 
255 | {:error, error} ->
256 | {:halt, {:error, error}}
257 | end
258 | end
259 | end
260 | end
261 | 
262 | defp validate_option(unknown_key, value, _) do
263 | {:halt,
264 | {:error,
265 | %Spark.Options.ValidationError{
266 | key: unknown_key,
267 | message:
268 | "unknown options #{inspect([unknown_key])}, valid options are: #{inspect(@valid_options)}",
269 | value: value
270 | }}}
271 | end
272 | 
273 | for {key, config} <- Keyword.new(schema), !config[:private?] do
274 | if config[:required] do
275 | defp use_key(list, unquote(key)) do
276 | warn_deprecated(unquote(key))
277 | List.delete(list, unquote(key))
278 | end
279 | else
280 | defp use_key(list, unquote(key)) do
281 | warn_deprecated(unquote(key))
282 | 
283 | list
284 | end
285 | end
286 | end
287 | 
288 | for {key, config} <- Keyword.new(schema),
289 | Keyword.has_key?(config, :default),
290 | !config[:private?] do
291 | defp mark_set(struct, unquote(key)) do
292 | struct
293 | end
294 | end
295 | 
296 | defp mark_set(struct, key) do
297 | %{struct | __set__: [key | struct.__set__]}
298 | end
299 | 
300 | for {key, config} <- Keyword.new(schema), config[:deprecated] do
301 | defp warn_deprecated(unquote(key)) do
302 | IO.warn("#{unquote(key)} is deprecated")
303 | end
304 | end
305 | 
306 | defp warn_deprecated(_key) do
307 | :ok
308 | end
309 | end
310 | ]
311 | end
312 | 
313 | @doc false
314 | def validate_schema!(schema) do
315 | if schema[:*] do
316 | raise "Schema with * not supported in validator"
317 | else
318 | Enum.each(schema, fn {_key, config} ->
319 | if config[:type] == :keyword do
320 | # When we support nested keywords, they should get structs
321 | # auto-defined for them as well.
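# (For example, a hypothetical schema entry such as
# `settings: [type: :keyword]` is rejected by the raise below.)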
322 | raise "Nested keywords not accepted in validators yet" 323 | end 324 | end) 325 | end 326 | end 327 | end 328 | -------------------------------------------------------------------------------- /lib/spark/options_helpers.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.OptionsHelpers do 2 | @moduledoc """ 3 | Helpers for working with options lists. 4 | """ 5 | 6 | @deprecated "Use `Spark.Options.merge/3` instead" 7 | def merge_schemas(left, right, section \\ nil) do 8 | Spark.Options.merge(left, right, section) 9 | end 10 | 11 | @deprecated "Use `Spark.Options.validate/2` instead" 12 | def validate(opts, schema) do 13 | Spark.Options.validate(opts, schema) 14 | end 15 | 16 | @deprecated "Use `Spark.Options.validate!/2` instead" 17 | def validate!(opts, schema) do 18 | Spark.Options.validate!(opts, schema) 19 | end 20 | 21 | @doc """ 22 | Creates markdown documentation for a given schema. 23 | """ 24 | @deprecated "Use `Spark.Options.docs/1` instead" 25 | def docs(schema) do 26 | Spark.Options.docs(schema) 27 | end 28 | 29 | @deprecated "use `Spark.Options.Helpers.make_required!/2`" 30 | def make_required!(options, field) do 31 | Spark.Options.Helpers.make_required!(options, field) 32 | end 33 | 34 | @deprecated "use `Spark.Options.Helpers.make_optional!/2`" 35 | def make_optional!(options, field) do 36 | Spark.Options.Helpers.make_optional!(options, field) 37 | end 38 | 39 | @deprecated "use `Spark.Options.Helpers.set_type!/3`" 40 | def set_type!(options, field, type) do 41 | Spark.Options.Helpers.set_type!(options, field, type) 42 | end 43 | 44 | @deprecated "use `Spark.Options.Helpers.set_default!/3`" 45 | def set_default!(options, field, value) do 46 | Spark.Options.Helpers.set_default!(options, field, value) 47 | end 48 | 49 | @deprecated "use `Spark.Options.Helpers.append_doc!/3`" 50 | def append_doc!(options, field, to_append) do 51 | Spark.Options.Helpers.append_doc!(options, field, to_append) 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.MixProject do 2 | use Mix.Project 3 | 4 | @version "2.2.63" 5 | 6 | @description "Generic tooling for building DSLs" 7 | 8 | def project do 9 | [ 10 | app: :spark, 11 | version: @version, 12 | elixir: "~> 1.15", 13 | start_permanent: Mix.env() == :prod, 14 | deps: deps(), 15 | package: package(), 16 | docs: docs(), 17 | description: @description, 18 | elixirc_paths: elixirc_paths(Mix.env()), 19 | aliases: aliases(), 20 | source_url: "https://github.com/ash-project/spark", 21 | homepage_url: "https://github.com/ash-project/spark", 22 | dialyzer: [plt_add_apps: [:mix]] 23 | ] 24 | end 25 | 26 | defp elixirc_paths(:test) do 27 | elixirc_paths(:prod) ++ ["test/support"] 28 | end 29 | 30 | defp elixirc_paths(_), do: ["lib"] 31 | 32 | # Run "mix help compile.app" to learn about applications. 
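# Only :logger is required at runtime; sourceror, jason, and igniter are
# declared as optional dependencies in deps/0 below.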
33 | def application do 34 | [ 35 | extra_applications: [:logger] 36 | ] 37 | end 38 | 39 | defp docs do 40 | # The main page in the docs 41 | [ 42 | main: "get-started-with-spark", 43 | source_ref: "v#{@version}", 44 | extra_section: "GUIDES", 45 | before_closing_head_tag: fn type -> 46 | if type == :html do 47 | """ 48 | 57 | """ 58 | end 59 | end, 60 | spark: [ 61 | mix_tasks: [ 62 | Formatting: [ 63 | Mix.Tasks.Spark.Formatter 64 | ] 65 | ] 66 | ], 67 | extras: [ 68 | "documentation/how_to/upgrade-to-2.0.md", 69 | "documentation/how_to/writing-extensions.md", 70 | "documentation/how_to/split-up-large-dsls.md", 71 | "documentation/tutorials/get-started-with-spark.md" 72 | ], 73 | groups_for_extras: [ 74 | "How To": ~r/documentation\/how_to/, 75 | Tutorials: ~r/documentation\/tutorials/ 76 | ], 77 | groups_for_modules: [ 78 | "DSLs and Extensions": ~r/^Spark.Dsl/, 79 | Options: ~r/^Spark.Options/, 80 | Errors: [Spark.Error.DslError], 81 | Internals: ~r/.*/ 82 | ] 83 | ] 84 | end 85 | 86 | defp package do 87 | [ 88 | name: :spark, 89 | licenses: ["MIT"], 90 | files: ~w(lib .formatter.exs mix.exs README* LICENSE* 91 | CHANGELOG* documentation), 92 | links: %{ 93 | GitHub: "https://github.com/ash-project/spark" 94 | } 95 | ] 96 | end 97 | 98 | # Run "mix help deps" to learn about dependencies. 99 | defp deps do 100 | [ 101 | {:sourceror, "~> 1.2", optional: true}, 102 | {:jason, "~> 1.4", optional: true}, 103 | {:igniter, "~> 0.2 and >= 0.3.64", optional: true}, 104 | # Dev/Test dependencies 105 | {:benchee, "~> 1.3", only: [:dev, :test]}, 106 | {:eflame, "~> 1.0", only: [:dev, :test], runtime: false}, 107 | {:ex_doc, "~> 0.32", only: [:dev, :test], runtime: false}, 108 | {:ex_check, "~> 0.12", only: [:dev, :test]}, 109 | {:credo, ">= 0.0.0", only: [:dev, :test], runtime: false}, 110 | {:dialyxir, ">= 0.0.0", only: [:dev, :test], runtime: false}, 111 | {:sobelow, ">= 0.0.0", only: [:dev, :test], runtime: false}, 112 | {:git_ops, "~> 2.5", only: [:dev, :test]}, 113 | {:mix_test_watch, "~> 1.0", only: [:dev, :test], runtime: false}, 114 | {:elixir_sense, 115 | github: "zachdaniel/elixir_sense", 116 | only: [:dev, :test, :docs], 117 | ref: "572c81c4046e12857b734abc28aa14b7d1f6f595"}, 118 | {:mix_audit, ">= 0.0.0", only: [:dev, :test], runtime: false} 119 | ] 120 | end 121 | 122 | defp aliases do 123 | [ 124 | sobelow: "sobelow --skip", 125 | credo: "credo --strict" 126 | ] 127 | end 128 | end 129 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "benchee": {:hex, :benchee, "1.4.0", "9f1f96a30ac80bab94faad644b39a9031d5632e517416a8ab0a6b0ac4df124ce", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "299cd10dd8ce51c9ea3ddb74bb150f93d25e968f93e4c1fa31698a8e4fa5d715"}, 3 | "bunt": {:hex, :bunt, "1.0.0", "081c2c665f086849e6d57900292b3a161727ab40431219529f13c4ddcf3e7a44", [:mix], [], "hexpm", "dc5f86aa08a5f6fa6b8096f0735c4e76d54ae5c9fa2c143e5a1fc7c1cd9bb6b5"}, 4 | "credo": {:hex, :credo, "1.7.12", "9e3c20463de4b5f3f23721527fcaf16722ec815e70ff6c60b86412c695d426c1", [:mix], [{:bunt, "~> 0.2.1 or ~> 1.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: 
"hexpm", optional: false]}], "hexpm", "8493d45c656c5427d9c729235b99d498bd133421f3e0a683e5c1b561471291e5"}, 5 | "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, 6 | "dialyxir": {:hex, :dialyxir, "1.4.5", "ca1571ac18e0f88d4ab245f0b60fa31ff1b12cbae2b11bd25d207f865e8ae78a", [:mix], [{:erlex, ">= 0.2.7", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "b0fb08bb8107c750db5c0b324fa2df5ceaa0f9307690ee3c1f6ba5b9eb5d35c3"}, 7 | "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"}, 8 | "eflame": {:hex, :eflame, "1.0.1", "0664d287e39eef3c413749254b3af5f4f8b00be71c1af67d325331c4890be0fc", [:mix], [], "hexpm", "e0b08854a66f9013129de0b008488f3411ae9b69b902187837f994d7a99cf04e"}, 9 | "elixir_sense": {:git, "https://github.com/zachdaniel/elixir_sense.git", "572c81c4046e12857b734abc28aa14b7d1f6f595", [ref: "572c81c4046e12857b734abc28aa14b7d1f6f595"]}, 10 | "erlex": {:hex, :erlex, "0.2.7", "810e8725f96ab74d17aac676e748627a07bc87eb950d2b83acd29dc047a30595", [:mix], [], "hexpm", "3ed95f79d1a844c3f6bf0cea61e0d5612a42ce56da9c03f01df538685365efb0"}, 11 | "ex_check": {:hex, :ex_check, "0.16.0", "07615bef493c5b8d12d5119de3914274277299c6483989e52b0f6b8358a26b5f", [:mix], [], "hexpm", "4d809b72a18d405514dda4809257d8e665ae7cf37a7aee3be6b74a34dec310f5"}, 12 | "ex_doc": {:hex, :ex_doc, "0.38.2", "504d25eef296b4dec3b8e33e810bc8b5344d565998cd83914ffe1b8503737c02", [:mix], [{:earmark_parser, "~> 1.4.44", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "732f2d972e42c116a70802f9898c51b54916e542cc50968ac6980512ec90f42b"}, 13 | "file_system": {:hex, :file_system, "1.1.0", "08d232062284546c6c34426997dd7ef6ec9f8bbd090eb91780283c9016840e8f", [:mix], [], "hexpm", "bfcf81244f416871f2a2e15c1b515287faa5db9c6bcf290222206d120b3d43f6"}, 14 | "finch": {:hex, :finch, "0.19.0", "c644641491ea854fc5c1bbaef36bfc764e3f08e7185e1f084e35e0672241b76d", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "fc5324ce209125d1e2fa0fcd2634601c52a787aff1cd33ee833664a5af4ea2b6"}, 15 | "git_cli": {:hex, :git_cli, "0.3.0", "a5422f9b95c99483385b976f5d43f7e8233283a47cda13533d7c16131cb14df5", [:mix], [], "hexpm", "78cb952f4c86a41f4d3511f1d3ecb28edb268e3a7df278de2faa1bd4672eaf9b"}, 16 | "git_ops": {:hex, :git_ops, "2.8.0", "29ac9ab68bf9645973cb2752047b987e75cbd3d9761489c615e3ba80018fa885", [:mix], [{:git_cli, "~> 0.2", [hex: :git_cli, repo: "hexpm", optional: false]}, {:igniter, ">= 0.5.27 and < 1.0.0-0", [hex: :igniter, repo: "hexpm", optional: true]}, {:nimble_parsec, "~> 1.0", [hex: :nimble_parsec, repo: "hexpm", optional: 
false]}, {:req, "~> 0.5", [hex: :req, repo: "hexpm", optional: false]}], "hexpm", "b535e4ad6b5d13e14c455e76f65825659081b5530b0827eb0232d18719530eec"}, 17 | "glob_ex": {:hex, :glob_ex, "0.1.11", "cb50d3f1ef53f6ca04d6252c7fde09fd7a1cf63387714fe96f340a1349e62c93", [:mix], [], "hexpm", "342729363056e3145e61766b416769984c329e4378f1d558b63e341020525de4"}, 18 | "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"}, 19 | "igniter": {:hex, :igniter, "0.6.6", "82d707a2419a95e6ea115949c68a9113dfc0b4802d3d8386aa351feb0ead71ee", [:mix], [{:glob_ex, "~> 0.1.7", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:owl, "~> 0.11", [hex: :owl, repo: "hexpm", optional: false]}, {:phx_new, "~> 1.7", [hex: :phx_new, repo: "hexpm", optional: true]}, {:req, "~> 0.5", [hex: :req, repo: "hexpm", optional: false]}, {:rewrite, ">= 1.1.1 and < 2.0.0-0", [hex: :rewrite, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.4", [hex: :sourceror, repo: "hexpm", optional: false]}, {:spitfire, ">= 0.1.3 and < 1.0.0-0", [hex: :spitfire, repo: "hexpm", optional: false]}], "hexpm", "a85832987fc78f5fdc38f628a62acfd50b4e441166496fea15c7b05218fa84f5"}, 20 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"}, 21 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, 22 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, 23 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, 24 | "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"}, 25 | "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"}, 26 | "mix_audit": {:hex, :mix_audit, "2.1.4", "0a23d5b07350cdd69001c13882a4f5fb9f90fbd4cbf2ebc190a2ee0d187ea3e9", [:make, :mix], [{:jason, "~> 1.4", [hex: :jason, repo: "hexpm", optional: false]}, {:yaml_elixir, "~> 2.11", [hex: :yaml_elixir, repo: "hexpm", optional: false]}], "hexpm", "fd807653cc8c1cada2911129c7eb9e985e3cc76ebf26f4dd628bb25bbcaa7099"}, 27 | "mix_test_watch": {:hex, :mix_test_watch, "1.3.0", 
"2ffc9f72b0d1f4ecf0ce97b044e0e3c607c3b4dc21d6228365e8bc7c2856dc77", [:mix], [{:file_system, "~> 0.2 or ~> 1.0", [hex: :file_system, repo: "hexpm", optional: false]}], "hexpm", "f9e5edca976857ffac78632e635750d158df14ee2d6185a15013844af7570ffe"}, 28 | "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"}, 29 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, 30 | "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"}, 31 | "owl": {:hex, :owl, "0.12.2", "65906b525e5c3ef51bab6cba7687152be017aebe1da077bb719a5ee9f7e60762", [:mix], [{:ucwidth, "~> 0.2", [hex: :ucwidth, repo: "hexpm", optional: true]}], "hexpm", "6398efa9e1fea70a04d24231e10dcd66c1ac1aa2da418d20ef5357ec61de2880"}, 32 | "req": {:hex, :req, "0.5.10", "a3a063eab8b7510785a467f03d30a8d95f66f5c3d9495be3474b61459c54376c", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "8a604815743f8a2d3b5de0659fa3137fa4b1cffd636ecb69b30b2b9b2c2559be"}, 33 | "rewrite": {:hex, :rewrite, "1.1.2", "f5a5d10f5fed1491a6ff48e078d4585882695962ccc9e6c779bae025d1f92eda", [:mix], [{:glob_ex, "~> 0.1", [hex: :glob_ex, repo: "hexpm", optional: false]}, {:sourceror, "~> 1.0", [hex: :sourceror, repo: "hexpm", optional: false]}, {:text_diff, "~> 0.1", [hex: :text_diff, repo: "hexpm", optional: false]}], "hexpm", "7f8b94b1e3528d0a47b3e8b7bfeca559d2948a65fa7418a9ad7d7712703d39d4"}, 34 | "sobelow": {:hex, :sobelow, "0.14.0", "dd82aae8f72503f924fe9dd97ffe4ca694d2f17ec463dcfd365987c9752af6ee", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "7ecf91e298acfd9b24f5d761f19e8f6e6ac585b9387fb6301023f1f2cd5eed5f"}, 35 | "sourceror": {:hex, :sourceror, "1.10.0", "38397dedbbc286966ec48c7af13e228b171332be1ad731974438c77791945ce9", [:mix], [], "hexpm", "29dbdfc92e04569c9d8e6efdc422fc1d815f4bd0055dc7c51b8800fb75c4b3f1"}, 36 | "spitfire": {:hex, :spitfire, "0.2.1", "29e154873f05444669c7453d3d931820822cbca5170e88f0f8faa1de74a79b47", [:mix], [], "hexpm", "6eeed75054a38341b2e1814d41bb0a250564092358de2669fdb57ff88141d91b"}, 37 | "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, 38 | "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"}, 39 | "text_diff": {:hex, :text_diff, "0.1.0", "1caf3175e11a53a9a139bc9339bd607c47b9e376b073d4571c031913317fecaa", [:mix], [], "hexpm", "d1ffaaecab338e49357b6daa82e435f877e0649041ace7755583a0ea3362dbd7"}, 40 | "yamerl": {:hex, 
:yamerl, "0.10.0", "4ff81fee2f1f6a46f1700c0d880b24d193ddb74bd14ef42cb0bcf46e81ef2f8e", [:rebar3], [], "hexpm", "346adb2963f1051dc837a2364e4acf6eb7d80097c0f53cbdc3046ec8ec4b4e6e"}, 41 | "yaml_elixir": {:hex, :yaml_elixir, "2.11.0", "9e9ccd134e861c66b84825a3542a1c22ba33f338d82c07282f4f1f52d847bd50", [:mix], [{:yamerl, "~> 0.10", [hex: :yamerl, repo: "hexpm", optional: false]}], "hexpm", "53cc28357ee7eb952344995787f4bb8cc3cecbf189652236e9b163e8ce1bc242"}, 42 | } 43 | -------------------------------------------------------------------------------- /test/add_extension_test.exs: -------------------------------------------------------------------------------- 1 | defmodule AddExtensionTest do 2 | use ExUnit.Case 3 | 4 | test "extensions can add other extensions" do 5 | defmodule Ext0 do 6 | @moduledoc false 7 | use Spark.Dsl.Extension, 8 | sections: [%Spark.Dsl.Section{name: :ext0, schema: [name: [type: :atom]]}] 9 | end 10 | 11 | defmodule Ext1 do 12 | @moduledoc false 13 | use Spark.Dsl.Extension, 14 | sections: [%Spark.Dsl.Section{name: :ext1, schema: [name: [type: :atom]]}], 15 | add_extensions: [Ext0] 16 | end 17 | 18 | defmodule Dsl do 19 | @moduledoc false 20 | use Spark.Dsl, default_extensions: [extensions: Ext1] 21 | end 22 | 23 | defmodule Example do 24 | @moduledoc false 25 | use Dsl 26 | 27 | ext0 do 28 | name(:ext0) 29 | end 30 | 31 | ext1 do 32 | name(:ext1) 33 | end 34 | end 35 | 36 | assert Dsl.default_extensions() == [Ext0, Ext1] 37 | assert Spark.Dsl.Extension.get_persisted(Example, :extensions) == [Ext1, Ext0] 38 | assert Spark.Dsl.Extension.get_opt(Example, [:ext1], :name) == :ext1 39 | assert Spark.Dsl.Extension.get_opt(Example, [:ext0], :name) == :ext0 40 | end 41 | end 42 | -------------------------------------------------------------------------------- /test/code_helpers_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.CodeHelpersTest do 2 | use ExUnit.Case 3 | 4 | import Spark.CodeHelpers 5 | 6 | test "it generates functions properly when `when` is used in a multi-clausal function" do 7 | {code, funs} = 8 | lift_functions( 9 | quote do 10 | fn 11 | %{context: %{error: error}} = changeset, _context when not is_nil(error) -> 12 | {:error, error} 13 | 14 | changeset, _context -> 15 | changeset 16 | end 17 | end, 18 | :key, 19 | __ENV__ 20 | ) 21 | 22 | {:module, module, _, _} = 23 | Module.create( 24 | Foo, 25 | quote do 26 | unquote(funs) 27 | 28 | def fun, do: unquote(code) 29 | end, 30 | Macro.Env.location(__ENV__) 31 | ) 32 | 33 | assert :erlang.fun_info(module.fun())[:arity] == 2 34 | end 35 | end 36 | -------------------------------------------------------------------------------- /test/cross_extension_recursive_patch_test.exs: -------------------------------------------------------------------------------- 1 | defmodule CrossExtensionPatchTest do 2 | use ExUnit.Case 3 | 4 | defmodule Template do 5 | @moduledoc false 6 | defstruct name: nil, type: nil 7 | 8 | def input(name) do 9 | %__MODULE__{name: name, type: :input} 10 | end 11 | end 12 | 13 | defmodule Argument do 14 | @moduledoc false 15 | defstruct name: nil, value: nil 16 | 17 | def entity do 18 | %Spark.Dsl.Entity{ 19 | name: :argument, 20 | args: [:name, :value], 21 | imports: [Template], 22 | target: __MODULE__, 23 | schema: [ 24 | name: [type: :atom, required: true], 25 | value: [type: {:struct, Template}, required: true] 26 | ] 27 | } 28 | end 29 | end 30 | 31 | defmodule Step do 32 | @moduledoc false 33 | defstruct name: nil, steps: [], 
arguments: [] 34 | 35 | def entity(name) do 36 | %Spark.Dsl.Entity{ 37 | name: name, 38 | args: [:name], 39 | entities: [ 40 | steps: [], 41 | arguments: [Argument.entity()] 42 | ], 43 | recursive_as: :steps, 44 | target: __MODULE__, 45 | schema: [ 46 | name: [type: :atom, required: true] 47 | ] 48 | } 49 | end 50 | end 51 | 52 | defmodule Input do 53 | @moduledoc false 54 | defstruct name: nil 55 | 56 | def entity do 57 | %Spark.Dsl.Entity{ 58 | name: :input, 59 | args: [:name], 60 | target: __MODULE__, 61 | schema: [ 62 | name: [type: :atom, required: true] 63 | ] 64 | } 65 | end 66 | end 67 | 68 | defmodule ExtensionA do 69 | @moduledoc false 70 | use Spark.Dsl.Extension, 71 | sections: [ 72 | %Spark.Dsl.Section{ 73 | name: :base, 74 | top_level?: true, 75 | patchable?: true, 76 | entities: [Step.entity(:step_a), Input.entity()] 77 | } 78 | ] 79 | end 80 | 81 | defmodule Imports do 82 | def foo(), do: 10 83 | end 84 | 85 | defmodule ExtensionB do 86 | @moduledoc false 87 | use Spark.Dsl.Extension, 88 | dsl_patches: [ 89 | %Spark.Dsl.Patch.AddEntity{section_path: [:base], entity: Step.entity(:step_b)} 90 | ] 91 | end 92 | 93 | defmodule Dsl do 94 | @moduledoc false 95 | use Spark.Dsl, default_extensions: [extensions: [ExtensionA, ExtensionB]] 96 | end 97 | 98 | defmodule ExtensionC do 99 | use Spark.Dsl.Extension, 100 | imports: [Imports] 101 | end 102 | 103 | test "fragments include imports from extensions" do 104 | defmodule ImportsFragments do 105 | use Spark.Dsl.Fragment, of: Dsl, extensions: [ExtensionC] 106 | 107 | 10 = foo() 108 | end 109 | end 110 | 111 | test "non patched entities are recursive" do 112 | defmodule NonPatchedRecursiveEntity do 113 | @moduledoc false 114 | use Dsl 115 | 116 | step_a :outer do 117 | step_a(:inner) 118 | end 119 | end 120 | 121 | assert [%{name: :outer, steps: [%{name: :inner}]}] = 122 | NonPatchedRecursiveEntity 123 | |> Spark.Dsl.Extension.get_entities([:base]) 124 | end 125 | 126 | test "patched entities are recursive" do 127 | defmodule PatchedRecursiveEntity do 128 | @moduledoc false 129 | use Dsl 130 | 131 | step_b :outer do 132 | step_b(:inner) 133 | end 134 | end 135 | 136 | assert [%{name: :outer, steps: [%{name: :inner}]}] = 137 | PatchedRecursiveEntity 138 | |> Spark.Dsl.Extension.get_entities([:base]) 139 | end 140 | 141 | test "patched entities can recurse into non-patched entities" do 142 | defmodule CrossExtensionPatchedEntityRecursion do 143 | @moduledoc false 144 | use Dsl 145 | 146 | step_b :outer do 147 | step_a(:inner) 148 | end 149 | end 150 | 151 | assert [%{name: :outer, steps: [%{name: :inner}]}] = 152 | CrossExtensionPatchedEntityRecursion 153 | |> Spark.Dsl.Extension.get_entities([:base]) 154 | end 155 | 156 | test "patched entities with non-conflicting imports" do 157 | defmodule NonConflictingImports do 158 | @moduledoc false 159 | use Dsl 160 | 161 | input(:fruit) 162 | 163 | step_a :a do 164 | argument(:arg, input(:fruit)) 165 | end 166 | 167 | step_b :b do 168 | argument(:arg, input(:bun)) 169 | end 170 | end 171 | 172 | [%{value: %Template{name: :fruit}}, %{value: %Template{name: :bun}}] = 173 | NonConflictingImports 174 | |> Spark.Dsl.Extension.get_entities([:base]) 175 | |> Enum.filter(&is_struct(&1, Step)) 176 | |> Enum.flat_map(& &1.arguments) 177 | end 178 | end 179 | -------------------------------------------------------------------------------- /test/elixir_sense/plugin_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.ElixirSense.PluginTest do 2 | 
use ExUnit.Case 3 | 4 | def cursors(text) do 5 | {_, cursors} = 6 | ElixirSense.Core.Source.walk_text(text, {false, []}, fn 7 | "#", rest, _, _, {_comment?, cursors} -> 8 | {rest, {true, cursors}} 9 | 10 | "\n", rest, _, _, {_comment?, cursors} -> 11 | {rest, {false, cursors}} 12 | 13 | "^", rest, line, col, {true, cursors} -> 14 | {rest, {true, [%{line: line - 1, col: col} | cursors]}} 15 | 16 | _, rest, _, _, acc -> 17 | {rest, acc} 18 | end) 19 | 20 | Enum.reverse(cursors) 21 | end 22 | 23 | def suggestions(buffer, cursor) do 24 | ElixirSense.suggestions(buffer, cursor.line, cursor.col) 25 | end 26 | 27 | def suggestions(buffer, cursor, type) do 28 | suggestions(buffer, cursor) 29 | |> Enum.filter(fn s -> s.type == type end) 30 | end 31 | 32 | def suggestions_by_kind(buffer, cursor, kind) do 33 | suggestions(buffer, cursor) 34 | |> Enum.filter(fn s -> s[:kind] == kind end) 35 | end 36 | 37 | test "suggesting patched entities" do 38 | buffer = """ 39 | defmodule MartyMcfly do 40 | use Spark.Test.Contact, 41 | extensions: [Spark.Test.ContactPatcher] 42 | 43 | presets do 44 | spec 45 | # ^ 46 | end 47 | end 48 | """ 49 | 50 | [cursor] = cursors(buffer) 51 | 52 | result = suggestions(buffer, cursor) 53 | 54 | assert [ 55 | %{ 56 | detail: "Dsl Entity", 57 | documentation: "", 58 | kind: :function, 59 | label: "special_preset", 60 | snippet: "special_preset ${1:name}", 61 | type: :generic 62 | } 63 | ] = result 64 | end 65 | 66 | test "suggesting patched entities two levels deep" do 67 | buffer = """ 68 | defmodule MartyMcfly do 69 | use Spark.Test.Contact, 70 | extensions: [Spark.Test.ContactPatcher] 71 | 72 | presets do 73 | nested_preset do 74 | 75 | # ^ 76 | end 77 | end 78 | end 79 | """ 80 | 81 | [cursor] = cursors(buffer) 82 | 83 | result = suggestions(buffer, cursor) 84 | 85 | assert [ 86 | %{ 87 | detail: "Option", 88 | documentation: "", 89 | kind: :function, 90 | label: "name", 91 | snippet: "name :$0", 92 | type: :generic 93 | }, 94 | %{ 95 | label: "special_preset", 96 | type: :generic, 97 | kind: :function, 98 | detail: "Dsl Entity", 99 | documentation: "", 100 | snippet: "special_preset ${1:name}" 101 | } 102 | ] = result 103 | end 104 | 105 | test "suggesting options inside of patched entities" do 106 | buffer = """ 107 | defmodule MartyMcfly do 108 | use Spark.Test.Contact, 109 | extensions: [Spark.Test.ContactPatcher] 110 | 111 | presets do 112 | special_preset :thing do 113 | f 114 | # ^ 115 | end 116 | end 117 | end 118 | """ 119 | 120 | [cursor] = cursors(buffer) 121 | 122 | result = suggestions(buffer, cursor) 123 | 124 | assert [ 125 | %{ 126 | detail: "Option", 127 | documentation: "", 128 | kind: :function, 129 | label: "foo", 130 | snippet: "foo :$0", 131 | type: :generic 132 | } 133 | ] = result 134 | end 135 | 136 | test "entity snippets are correctly shown" do 137 | buffer = """ 138 | defmodule DocBrown do 139 | use Spark.Test.Contact 140 | 141 | presets do 142 | preset_with_snip 143 | # ^ 144 | end 145 | end 146 | """ 147 | 148 | [cursor] = cursors(buffer) 149 | result = suggestions(buffer, cursor) 150 | 151 | assert [%{snippet: "preset_with_snippet ${1::doc_brown}"}] = result 152 | end 153 | 154 | test "entity snippets are correctly shown when parenthesis are involved" do 155 | buffer = """ 156 | defmodule DocBrown do 157 | use Spark.Test.Contact 158 | 159 | presets do 160 | preset_with_snippet :foo, bar() do 161 | 162 | # ^ 163 | end 164 | end 165 | end 166 | """ 167 | 168 | [cursor] = cursors(buffer) 169 | 170 | assert [ 171 | %{ 172 | label: "thing", 173 | 
type: :generic, 174 | kind: :function, 175 | detail: "Option", 176 | documentation: "", 177 | snippet: "thing \"$0\"" 178 | } 179 | ] = Enum.take(suggestions(buffer, cursor), 1) 180 | end 181 | 182 | test "entity snippets are correctly shown when parenthesis are involved, using options" do 183 | buffer = """ 184 | defmodule DocBrown do 185 | use Spark.Test.Contact 186 | 187 | presets do 188 | preset_with_snippet :foo, bar(), 189 | # ^ 190 | end 191 | end 192 | end 193 | """ 194 | 195 | [cursor] = cursors(buffer) 196 | 197 | assert [ 198 | %{ 199 | label: "thing", 200 | type: :generic, 201 | kind: :function, 202 | detail: "Option", 203 | documentation: "", 204 | snippet: "thing: \"$0\"" 205 | } 206 | ] = Enum.take(suggestions(buffer, cursor), 1) 207 | end 208 | 209 | test "entity snippets show arguments before options" do 210 | buffer = """ 211 | defmodule DocBrown do 212 | use Spark.Test.Contact 213 | 214 | presets do 215 | preset_with_fn_arg :foo, 216 | # ^ 217 | end 218 | end 219 | end 220 | """ 221 | 222 | [cursor] = cursors(buffer) 223 | 224 | assert [ 225 | %{ 226 | label: "ExampleContacter", 227 | type: :generic, 228 | kind: :class, 229 | insert_text: "ExampleContacter", 230 | detail: "Spark.Test.Contact.Contacter", 231 | documentation: "" 232 | }, 233 | %{ 234 | args: "", 235 | arity: 0, 236 | name: "example", 237 | type: :function, 238 | origin: "Spark.Test.ContacterBuiltins", 239 | spec: "", 240 | metadata: %{app: :spark}, 241 | args_list: [], 242 | summary: "", 243 | snippet: nil, 244 | def_arity: 0, 245 | visibility: :public, 246 | needed_import: nil, 247 | needed_require: nil 248 | } 249 | ] = Enum.sort(suggestions(buffer, cursor)) 250 | end 251 | 252 | describe "using opts" do 253 | test "opts to `__using__` are autocompleted" do 254 | buffer = """ 255 | defmodule DocBrown do 256 | use Spark.Test.Contact, otp_ 257 | # ^ 258 | end 259 | """ 260 | 261 | [cursor] = cursors(buffer) 262 | 263 | assert [ 264 | %{ 265 | label: "otp_app", 266 | type: :generic, 267 | kind: :function, 268 | snippet: "otp_app: :$0", 269 | detail: "Option", 270 | documentation: "The otp_app to use for any application configurable options" 271 | } 272 | ] = suggestions(buffer, cursor) 273 | end 274 | 275 | test "opts to `__using__` are autocompleted in other formats" do 276 | buffer = """ 277 | defmodule DocBrown do 278 | use Spark.Test.Contact, 279 | thing: :foo, 280 | otp_ 281 | # ^ 282 | end 283 | """ 284 | 285 | [cursor] = cursors(buffer) 286 | 287 | assert [ 288 | %{ 289 | label: "otp_app", 290 | type: :generic, 291 | kind: :function, 292 | snippet: "otp_app: :$0", 293 | detail: "Option", 294 | documentation: "The otp_app to use for any application configurable options" 295 | } 296 | ] = suggestions(buffer, cursor) 297 | end 298 | 299 | defmodule Foo do 300 | use Spark.Dsl.Extension 301 | end 302 | 303 | # This test is flaky, no time to figure out why, but it works in practice 304 | # test "opts to `__using__` are autocompleted with value types" do 305 | # buffer = """ 306 | # defmodule DocBrown do 307 | # use Spark.Test.Contact, extensions: [] 308 | # # ^ 309 | # end 310 | # """ 311 | 312 | # [cursor] = cursors(buffer) 313 | 314 | # # depending on test order, some extensions are loaded and some arent. 
315 | # for extension <- [ 316 | # MyExtension, 317 | # Spark.Test.ContactPatcher, 318 | # Spark.Test.Recursive.Dsl, 319 | # Spark.Test.TopLevelDsl, 320 | # Spark.Test.Contact.Dsl 321 | # ] do 322 | # Code.ensure_compiled!(extension) 323 | # extension.sections() |> IO.inspect() 324 | 325 | # suggestions = 326 | # buffer 327 | # |> suggestions(cursor) 328 | # |> Enum.map(& &1.insert_text) 329 | # |> Enum.take(4) 330 | # |> Enum.sort() 331 | 332 | # assert inspect(extension) in suggestions 333 | # end 334 | # end 335 | end 336 | 337 | describe "function that accepts a spark option schema" do 338 | test "it autocompletes the opts" do 339 | buffer = """ 340 | ExampleOptions.func(1, 2, opt 341 | # ^ 342 | """ 343 | 344 | [cursor] = cursors(buffer) 345 | 346 | assert [ 347 | %{ 348 | label: "option", 349 | type: :generic, 350 | kind: :function, 351 | snippet: "option: \"$0\"", 352 | detail: "Option", 353 | documentation: "An option" 354 | } 355 | ] = suggestions(buffer, cursor) 356 | end 357 | 358 | test "it autocompletes the opts when piping" do 359 | buffer = """ 360 | 1 361 | |> ExampleOptions.func(2, 362 | opt 363 | # ^ 364 | """ 365 | 366 | [cursor] = cursors(buffer) 367 | 368 | assert [ 369 | %{ 370 | label: "option", 371 | type: :generic, 372 | kind: :function, 373 | snippet: "option: \"$0\"", 374 | detail: "Option", 375 | documentation: "An option" 376 | } 377 | ] = suggestions(buffer, cursor) 378 | end 379 | 380 | test "it ignores maps" do 381 | buffer = """ 382 | ExampleOptions.func(1, 2, %{opt 383 | # ^ 384 | """ 385 | 386 | [cursor] = cursors(buffer) 387 | 388 | assert [] = suggestions(buffer, cursor) 389 | end 390 | end 391 | end 392 | -------------------------------------------------------------------------------- /test/formatter_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.FormatterTest do 2 | use ExUnit.Case, async: false 3 | 4 | setup do 5 | on_exit(fn -> 6 | Application.delete_env(:spark, :formatter) 7 | end) 8 | 9 | opts = [ 10 | locals_without_parens: [ 11 | first_name: 1, 12 | street: 1, 13 | last_name: 1 14 | ] 15 | ] 16 | 17 | %{opts: opts} 18 | end 19 | 20 | defp config_contact(config, mod \\ "Spark.Test.Contact") do 21 | Application.put_env(:spark, :formatter, [{mod, config}, remove_parens?: true]) 22 | end 23 | 24 | test "it reorders sections properly", %{opts: opts} do 25 | config_contact(section_order: [:address, :personal_details]) 26 | 27 | assert Spark.Formatter.format( 28 | """ 29 | defmodule IncredibleHulk do 30 | use Spark.Test.Contact 31 | 32 | personal_details do 33 | first_name("Incredible") 34 | last_name("Hulk") 35 | end 36 | 37 | address do 38 | street("Avenger Lane") 39 | end 40 | end 41 | """, 42 | opts 43 | ) == """ 44 | defmodule IncredibleHulk do 45 | use Spark.Test.Contact 46 | 47 | address do 48 | street "Avenger Lane" 49 | end 50 | 51 | personal_details do 52 | first_name "Incredible" 53 | last_name "Hulk" 54 | end 55 | end 56 | """ 57 | end 58 | 59 | test "it reorders sections properly when using a base resource", %{opts: opts} do 60 | config_contact( 61 | [type: Spark.Test.Contact, section_order: [:address, :personal_details]], 62 | "Base" 63 | ) 64 | 65 | assert Spark.Formatter.format( 66 | """ 67 | defmodule IncredibleHulk do 68 | use Base 69 | 70 | personal_details do 71 | first_name("Incredible") 72 | last_name("Hulk") 73 | end 74 | 75 | address do 76 | street("Avenger Lane") 77 | end 78 | end 79 | """, 80 | opts 81 | ) == """ 82 | defmodule IncredibleHulk do 83 | use Base 
84 | 85 | address do 86 | street "Avenger Lane" 87 | end 88 | 89 | personal_details do 90 | first_name "Incredible" 91 | last_name "Hulk" 92 | end 93 | end 94 | """ 95 | end 96 | end 97 | -------------------------------------------------------------------------------- /test/igniter_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.IgniterTest do 2 | use ExUnit.Case 3 | 4 | import Igniter.Test 5 | 6 | test "options are found in DSLs" do 7 | assert {_igniter, {:ok, Bar.Baz}} = 8 | test_project() 9 | |> Igniter.Project.Module.create_module(TedDansen, """ 10 | use Spark.Test.Contact 11 | 12 | contact do 13 | module(Bar.Baz) 14 | end 15 | """) 16 | |> Spark.Igniter.get_option(TedDansen, [:contact, :module]) 17 | end 18 | 19 | test "options are found in fragments" do 20 | assert {_igniter, {:ok, "foobar"}} = 21 | test_project() 22 | |> Igniter.Project.Module.create_module(TedDansenFragment, """ 23 | @moduledoc false 24 | use Spark.Dsl.Fragment, of: Spark.Test.Contact 25 | 26 | address do 27 | street("foobar") 28 | end 29 | """) 30 | |> Igniter.Project.Module.create_module(TedDansen, """ 31 | use Spark.Test.Contact, fragments: [TedDansenFragment] 32 | 33 | contact do 34 | module(Bar.Baz) 35 | end 36 | """) 37 | |> Spark.Igniter.get_option(TedDansen, [:address, :street]) 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /test/options/impl_validator_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.Options.ImplValidatorTest do 2 | use ExUnit.Case 3 | require Spark.Options.Validator 4 | 5 | defmodule MySchema do 6 | @schema [ 7 | foo: [ 8 | type: {:impl, Enumerable} 9 | ], 10 | bar: [ 11 | type: {:impl, Integer} 12 | ], 13 | baz: [ 14 | type: {:impl, ThisDoesNotReallyExist} 15 | ] 16 | ] 17 | 18 | use Spark.Options.Validator, schema: @schema, define_deprecated_access?: true 19 | end 20 | 21 | describe "impl option" do 22 | test "accepts a module for which a protocol is implemented" do 23 | MySchema.validate!(foo: List) 24 | end 25 | 26 | test "rejects a module for which a protocol is not implemented" do 27 | assert_raise( 28 | Spark.Options.ValidationError, 29 | "protocol Enumerable is not implemented by Enum", 30 | fn -> 31 | MySchema.validate!(foo: Enum) 32 | end 33 | ) 34 | end 35 | 36 | test "rejects module that is not a protocol" do 37 | assert_raise(Spark.Options.ValidationError, "Integer is not a protocol", fn -> 38 | MySchema.validate!(bar: Enum) 39 | end) 40 | end 41 | 42 | test "rejects module that does not exist" do 43 | assert_raise( 44 | Spark.Options.ValidationError, 45 | "protocol Enumerable is not implemented by ThisDoesNotReallyExist", 46 | fn -> 47 | MySchema.validate!(foo: ThisDoesNotReallyExist) 48 | end 49 | ) 50 | end 51 | 52 | test "rejects protocol name that is not a protocol" do 53 | assert_raise( 54 | Spark.Options.ValidationError, 55 | "ThisDoesNotReallyExist is not a protocol", 56 | fn -> 57 | MySchema.validate!(baz: Enum) 58 | end 59 | ) 60 | end 61 | end 62 | end 63 | -------------------------------------------------------------------------------- /test/options/mixed_list_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.Options.MixedListTypeTest do 2 | @moduledoc false 3 | 4 | use ExUnit.Case 5 | 6 | require Spark.Options.Validator 7 | 8 | defmodule MyMixedListSchema do 9 | @option_spec [ 10 | foo: [ 11 | enabled?: [type: :boolean, 
required: true] 12 | ], 13 | bar: [ 14 | enabled?: [type: :boolean, required: false] 15 | ], 16 | baz: [ 17 | enabled?: [type: :boolean, required: false] 18 | ] 19 | ] 20 | 21 | @literals @option_spec |> Keyword.keys() |> Enum.map(&{:literal, &1}) 22 | @tuples @option_spec 23 | |> Enum.map(fn {key, spec} -> 24 | {:tuple, [{:literal, key}, {:keyword_list, spec}]} 25 | end) 26 | 27 | @schema [ 28 | schema: [ 29 | type: {:list, {:or, @literals ++ @tuples}} 30 | ] 31 | ] 32 | 33 | use Spark.Options.Validator, schema: @schema 34 | end 35 | 36 | describe "mixed list types" do 37 | test "can use only atoms" do 38 | MyMixedListSchema.validate!(schema: [:foo]) 39 | MyMixedListSchema.validate!(schema: [:foo, :bar]) 40 | MyMixedListSchema.validate!(schema: [:foo, :bar, :baz]) 41 | end 42 | 43 | test "can use only keywords" do 44 | MyMixedListSchema.validate!( 45 | schema: [ 46 | foo: [enabled?: true] 47 | ] 48 | ) 49 | 50 | MyMixedListSchema.validate!( 51 | schema: [ 52 | foo: [enabled?: true], 53 | bar: [enabled?: false] 54 | ] 55 | ) 56 | 57 | MyMixedListSchema.validate!( 58 | schema: [ 59 | foo: [enabled?: true], 60 | bar: [enabled?: false], 61 | baz: [enabled?: false] 62 | ] 63 | ) 64 | 65 | MyMixedListSchema.validate!( 66 | schema: [ 67 | foo: [enabled?: true], 68 | bar: [], 69 | baz: [enabled?: false] 70 | ] 71 | ) 72 | end 73 | 74 | test "can mix atoms and keywords" do 75 | MyMixedListSchema.validate!(schema: [:foo, bar: [enabled?: true]]) 76 | MyMixedListSchema.validate!(schema: [:foo, bar: [enabled?: true], baz: [enabled?: true]]) 77 | MyMixedListSchema.validate!(schema: [:foo, :baz, bar: [enabled?: true]]) 78 | end 79 | end 80 | end 81 | -------------------------------------------------------------------------------- /test/options/validator_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.Options.ValidatorTest do 2 | use ExUnit.Case 3 | require Spark.Options.Validator 4 | 5 | defmodule MySchema do 6 | @schema [ 7 | foo: [ 8 | type: :string, 9 | required: true 10 | ], 11 | bar: [ 12 | type: :string, 13 | default: "default" 14 | ], 15 | baz: [ 16 | type: :integer, 17 | default: 10 18 | ], 19 | buz: [ 20 | type: {:custom, __MODULE__, :anything, []} 21 | ] 22 | ] 23 | 24 | def anything(value), do: {:ok, value} 25 | 26 | use Spark.Options.Validator, schema: @schema, define_deprecated_access?: true 27 | end 28 | 29 | describe "definition" do 30 | test "it defines a struct" do 31 | assert MySchema.__struct__() 32 | end 33 | 34 | test "the keys are the same as the schema keys" do 35 | assert Map.keys(MySchema.__struct__()) -- [:__struct__, :__set__] == 36 | Keyword.keys(MySchema.schema()) 37 | end 38 | 39 | test "stores which keys were explicitly provided or have defaults" do 40 | assert MySchema.validate!(foo: "10", bar: "10").__set__ == [:foo, :bar, :baz] 41 | end 42 | 43 | test "to_options returns the validated values for keys that were provided" do 44 | assert MySchema.to_options(MySchema.validate!(foo: "10", bar: "10")) == [ 45 | baz: 10, 46 | bar: "10", 47 | foo: "10" 48 | ] 49 | end 50 | 51 | test "custom types are properly escaped" do 52 | assert MySchema.to_options(MySchema.validate!(foo: "10", bar: "10", buz: "anything")) == [ 53 | baz: 10, 54 | bar: "10", 55 | foo: "10", 56 | buz: "anything" 57 | ] 58 | end 59 | end 60 | end 61 | -------------------------------------------------------------------------------- /test/recursive_test.exs: -------------------------------------------------------------------------------- 1 | 
defmodule Spark.RecursiveTest do 2 | use ExUnit.Case 3 | 4 | alias Spark.Test.Recursive.Info 5 | 6 | test "recursive DSLs can be defined without recursive elements" do 7 | defmodule Simple do 8 | use Spark.Test.Recursive 9 | 10 | steps do 11 | step(:foo) 12 | end 13 | end 14 | 15 | assert [%Spark.Test.Step{name: :foo, steps: []}] = Info.steps(Simple) 16 | end 17 | 18 | test "recursive DSLs can recurse" do 19 | defmodule OneRecurse do 20 | use Spark.Test.Recursive 21 | 22 | steps do 23 | step :foo do 24 | step(:bar) 25 | end 26 | end 27 | end 28 | 29 | assert [ 30 | %Spark.Test.Step{ 31 | name: :foo, 32 | steps: [ 33 | %Spark.Test.Step{name: :bar, steps: []} 34 | ] 35 | } 36 | ] = Info.steps(OneRecurse) 37 | end 38 | 39 | test "recursive DSLs can be mixed" do 40 | defmodule MixedRecurse do 41 | use Spark.Test.Recursive 42 | 43 | steps do 44 | step :foo do 45 | special_step(:special) 46 | atom(:bar) 47 | end 48 | end 49 | end 50 | 51 | assert [ 52 | %Spark.Test.Step{ 53 | name: :foo, 54 | steps: [ 55 | %Spark.Test.Step{name: :special, steps: []}, 56 | %Spark.Test.Atom{name: :bar} 57 | ] 58 | } 59 | ] = Info.steps(MixedRecurse) 60 | end 61 | 62 | test "recursive DSLs that share entities don't collide" do 63 | defmodule MixedRecurseSharedEntity do 64 | use Spark.Test.Recursive 65 | 66 | steps do 67 | step :foo do 68 | special_step(:special) do 69 | atom(:bar) 70 | end 71 | 72 | atom(:bar) 73 | end 74 | end 75 | end 76 | 77 | assert [ 78 | %Spark.Test.Step{ 79 | name: :foo, 80 | steps: [ 81 | %Spark.Test.Step{ 82 | name: :special, 83 | steps: [ 84 | %Spark.Test.Atom{name: :bar} 85 | ] 86 | }, 87 | %Spark.Test.Atom{name: :bar} 88 | ] 89 | } 90 | ] = Info.steps(MixedRecurseSharedEntity) 91 | end 92 | 93 | test "recursive DSLs that share options don't collide" do 94 | defmodule OptionsDontCollide do 95 | use Spark.Test.Recursive 96 | 97 | steps do 98 | step :foo do 99 | number(10) 100 | 101 | special_step(:special) do 102 | number(12) 103 | end 104 | end 105 | end 106 | end 107 | 108 | assert [ 109 | %Spark.Test.Step{ 110 | name: :foo, 111 | number: 10, 112 | steps: [ 113 | %Spark.Test.Step{ 114 | name: :special, 115 | number: 12 116 | } 117 | ] 118 | } 119 | ] = Info.steps(OptionsDontCollide) 120 | end 121 | 122 | test "recursive DSLs can share entities and be deeply nested" do 123 | defmodule DeeplyNested do 124 | use Spark.Test.Recursive 125 | 126 | steps do 127 | step :foo do 128 | special_step(:special) do 129 | atom(:bar) 130 | 131 | step :step_in_special do 132 | step :step_in_special2 do 133 | atom(:bar) 134 | end 135 | end 136 | end 137 | 138 | step :not_special do 139 | special_step(:special2) do 140 | atom(:bar) 141 | end 142 | end 143 | end 144 | end 145 | end 146 | 147 | assert [ 148 | %Spark.Test.Step{ 149 | name: :foo, 150 | steps: [ 151 | %Spark.Test.Step{ 152 | name: :special, 153 | steps: [ 154 | %Spark.Test.Atom{name: :bar}, 155 | %Spark.Test.Step{ 156 | name: :step_in_special, 157 | steps: [ 158 | %Spark.Test.Step{ 159 | name: :step_in_special2, 160 | steps: [ 161 | %Spark.Test.Atom{name: :bar} 162 | ] 163 | } 164 | ] 165 | } 166 | ] 167 | }, 168 | %Spark.Test.Step{ 169 | name: :not_special, 170 | steps: [ 171 | %Spark.Test.Step{ 172 | name: :special2, 173 | steps: [ 174 | %Spark.Test.Atom{name: :bar} 175 | ] 176 | } 177 | ] 178 | } 179 | ] 180 | } 181 | ] = Info.steps(DeeplyNested) 182 | end 183 | end 184 | -------------------------------------------------------------------------------- /test/spark_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule SparkTest do 2 | use ExUnit.Case 3 | doctest Spark 4 | doctest Spark.Options 5 | end 6 | -------------------------------------------------------------------------------- /test/support/contact/contact.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Contact do 2 | @moduledoc false 3 | 4 | @type t :: module 5 | 6 | defmodule Dsl do 7 | @moduledoc false 8 | 9 | @contact %Spark.Dsl.Section{ 10 | name: :contact, 11 | no_depend_modules: [:module], 12 | schema: [ 13 | module: [ 14 | type: :atom, 15 | doc: "A module" 16 | ], 17 | contacter: [ 18 | doc: "A function that will contact this person with a message", 19 | type: 20 | {:spark_function_behaviour, Spark.Test.Contact.Contacter, 21 | Spark.Test.ContacterBuiltins, {Spark.Test.Contact.Contacter.Function, 1}} 22 | ] 23 | ] 24 | } 25 | 26 | @personal_details %Spark.Dsl.Section{ 27 | name: :personal_details, 28 | schema: [ 29 | first_name: [ 30 | type: :string, 31 | doc: "The first name of the contact", 32 | required: true 33 | ], 34 | last_name: [ 35 | type: :string, 36 | doc: "The last name of the contact" 37 | ], 38 | contact: [ 39 | type: :string, 40 | doc: "Added to incur a conflict between this and the `contact` top level section." 41 | ] 42 | ] 43 | } 44 | 45 | @address %Spark.Dsl.Section{ 46 | name: :address, 47 | schema: [ 48 | street: [ 49 | type: :string, 50 | doc: "The street address" 51 | ] 52 | ] 53 | } 54 | 55 | defmodule Preset do 56 | @moduledoc false 57 | defstruct [ 58 | :name, 59 | :contacter, 60 | :default_message, 61 | :thing, 62 | :singleton, 63 | :__identifier__, 64 | special?: false 65 | ] 66 | end 67 | 68 | defmodule Singleton do 69 | @moduledoc false 70 | defstruct [:value] 71 | end 72 | 73 | @singleton %Spark.Dsl.Entity{ 74 | name: :singleton, 75 | args: [:value], 76 | target: Singleton, 77 | schema: [ 78 | value: [ 79 | type: :any 80 | ] 81 | ] 82 | } 83 | 84 | @preset_with_fn_arg %Spark.Dsl.Entity{ 85 | name: :preset_with_fn_arg, 86 | args: [:name, :contacter], 87 | target: Preset, 88 | schema: [ 89 | name: [ 90 | type: :atom 91 | ], 92 | default_message: [ 93 | type: :string 94 | ], 95 | contacter: [ 96 | type: 97 | {:spark_function_behaviour, Spark.Test.Contact.Contacter, 98 | Spark.Test.ContacterBuiltins, {Spark.Test.Contact.Contacter.Function, 1}} 99 | ] 100 | ] 101 | } 102 | 103 | @preset %Spark.Dsl.Entity{ 104 | name: :preset, 105 | args: [:name], 106 | target: Preset, 107 | identifier: :name, 108 | entities: [ 109 | singleton: [@singleton] 110 | ], 111 | singleton_entity_keys: [:singleton], 112 | schema: [ 113 | name: [ 114 | type: :atom 115 | ], 116 | default_message: [ 117 | type: :string 118 | ], 119 | contacter: [ 120 | type: 121 | {:spark_function_behaviour, Spark.Test.Contact.Contacter, 122 | Spark.Test.ContacterBuiltins, {Spark.Test.Contact.Contacter.Function, 1}} 123 | ] 124 | ] 125 | } 126 | 127 | @preset_with_quoted %Spark.Dsl.Entity{ 128 | name: :preset_with_quoted, 129 | args: [:name], 130 | target: Preset, 131 | schema: [ 132 | name: [ 133 | type: :quoted 134 | ], 135 | default_message: [ 136 | type: :quoted 137 | ] 138 | ] 139 | } 140 | 141 | @preset_with_one_optional %Spark.Dsl.Entity{ 142 | name: :preset_with_one_optional, 143 | args: [:name, {:optional, :default_message, "flubber"}], 144 | target: Preset, 145 | schema: [ 146 | name: [ 147 | type: :atom, 148 | required: true 149 | ], 150 | default_message: [ 151 | type: {:or, [:string, {:in,
[nil]}]} 152 | ], 153 | contacter: [ 154 | type: 155 | {:spark_function_behaviour, Spark.Test.Contact.Contacter, 156 | Spark.Test.ContacterBuiltins, {Spark.Test.Contact.Contacter.Function, 1}} 157 | ] 158 | ] 159 | } 160 | 161 | def identity(x) do 162 | x 163 | end 164 | 165 | @preset_with_first_optional %Spark.Dsl.Entity{ 166 | name: :preset_with_first_optional, 167 | args: [ 168 | {:optional, :name, :atom}, 169 | :default_message 170 | ], 171 | target: Preset, 172 | schema: [ 173 | name: [ 174 | type: :atom, 175 | required: true 176 | ], 177 | default_message: [ 178 | type: {:or, [:string, {:in, [nil]}]} 179 | ], 180 | contacter: [ 181 | type: 182 | {:spark_function_behaviour, Spark.Test.Contact.Contacter, 183 | Spark.Test.ContacterBuiltins, {Spark.Test.Contact.Contacter.Function, 1}} 184 | ] 185 | ] 186 | } 187 | 188 | @preset_with_optional %Spark.Dsl.Entity{ 189 | name: :preset_with_optional, 190 | args: [{:optional, :name, :atom}, {:optional, :default_message}], 191 | target: Preset, 192 | schema: [ 193 | name: [ 194 | type: :atom, 195 | required: true 196 | ], 197 | default_message: [ 198 | type: {:or, [:string, {:in, [nil]}]} 199 | ], 200 | contacter: [ 201 | type: 202 | {:spark_function_behaviour, Spark.Test.Contact.Contacter, 203 | Spark.Test.ContacterBuiltins, {Spark.Test.Contact.Contacter.Function, 1}} 204 | ] 205 | ] 206 | } 207 | 208 | @preset_with_snippet %Spark.Dsl.Entity{ 209 | name: :preset_with_snippet, 210 | args: [:name], 211 | target: Preset, 212 | snippet: "preset_with_snippet ${1::doc_brown}", 213 | schema: [ 214 | name: [ 215 | type: :atom, 216 | required: true 217 | ], 218 | thing: [ 219 | type: :string, 220 | required: false 221 | ] 222 | ] 223 | } 224 | 225 | @nested_preset %Spark.Dsl.Section{ 226 | name: :nested_preset, 227 | schema: [ 228 | name: [ 229 | type: :atom 230 | ] 231 | ] 232 | } 233 | 234 | @presets %Spark.Dsl.Section{ 235 | name: :presets, 236 | sections: [ 237 | @nested_preset 238 | ], 239 | entities: [ 240 | @preset, 241 | @preset_with_fn_arg, 242 | @preset_with_optional, 243 | @preset_with_one_optional, 244 | @preset_with_quoted, 245 | @preset_with_snippet, 246 | @preset_with_first_optional 247 | ] 248 | } 249 | 250 | @awesome_status %Spark.Dsl.Section{ 251 | name: :awesome?, 252 | schema: [ 253 | awesome?: [ 254 | type: :boolean, 255 | required: true 256 | ] 257 | ] 258 | } 259 | 260 | defmodule Transformer do 261 | @moduledoc false 262 | use Spark.Dsl.Transformer 263 | 264 | def transform(dsl) do 265 | {:ok, Spark.Dsl.Transformer.persist(dsl, {:foo, :bar, :baz}, 10)} 266 | end 267 | end 268 | 269 | use Spark.Dsl.Extension, 270 | sections: [@contact, @personal_details, @address, @presets, @awesome_status], 271 | verifiers: [Spark.Test.Contact.Verifiers.VerifyNotGandalf], 272 | transformers: [Transformer] 273 | 274 | def explain(dsl_state) do 275 | """ 276 | * first_name: #{Spark.Test.Contact.Info.first_name(dsl_state)} 277 | * last_name: #{Spark.Test.Contact.Info.last_name(dsl_state)} 278 | """ 279 | end 280 | end 281 | 282 | use Spark.Dsl, default_extensions: [extensions: Dsl] 283 | 284 | defmacro __using__(opts) do 285 | super(opts) 286 | end 287 | 288 | def explain(_dsl_state, _opts) do 289 | "Here is an explanation" 290 | end 291 | end 292 | -------------------------------------------------------------------------------- /test/support/contact/contact_patcher.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.ContactPatcher do 2 | @moduledoc false 3 | @special_preset 
%Spark.Dsl.Entity{ 4 | name: :special_preset, 5 | args: [:name], 6 | target: Spark.Test.Contact.Dsl.Preset, 7 | schema: [ 8 | name: [ 9 | type: :atom 10 | ], 11 | foo: [ 12 | type: :atom 13 | ] 14 | ], 15 | auto_set_fields: [ 16 | special?: true 17 | ] 18 | } 19 | 20 | @section_patch %Spark.Dsl.Patch.AddEntity{ 21 | section_path: [:presets], 22 | entity: @special_preset 23 | } 24 | 25 | @nested_section_patch %Spark.Dsl.Patch.AddEntity{ 26 | section_path: [:presets, :nested_preset], 27 | entity: @special_preset 28 | } 29 | 30 | use Spark.Dsl.Extension, dsl_patches: [@section_patch, @nested_section_patch] 31 | end 32 | -------------------------------------------------------------------------------- /test/support/contact/contacter.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Contact.Contacter do 2 | @moduledoc false 3 | @callback contact(Spark.Test.Contact.t(), message :: String.t()) :: {:ok, term} | {:error, term} 4 | end 5 | -------------------------------------------------------------------------------- /test/support/contact/fragment.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Contact.TedDansenFragment do 2 | @moduledoc false 3 | use Spark.Dsl.Fragment, of: Spark.Test.Contact 4 | 5 | address do 6 | street("foobar") 7 | end 8 | end 9 | -------------------------------------------------------------------------------- /test/support/contact/info.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Contact.Info do 2 | @moduledoc false 3 | 4 | def first_name(contact) do 5 | Spark.Dsl.Extension.get_opt(contact, [:personal_details], :first_name) 6 | end 7 | 8 | def last_name(contact) do 9 | Spark.Dsl.Extension.get_opt(contact, [:personal_details], :last_name) 10 | end 11 | 12 | def contacter(contact) do 13 | Spark.Dsl.Extension.get_opt(contact, [:contact], :contacter) 14 | end 15 | 16 | def module(contact) do 17 | Spark.Dsl.Extension.get_opt(contact, [:contact], :module) 18 | end 19 | 20 | def presets(contact) do 21 | Spark.Dsl.Extension.get_entities(contact, [:presets]) 22 | end 23 | end 24 | -------------------------------------------------------------------------------- /test/support/contact/ted_dansen.ex: -------------------------------------------------------------------------------- 1 | defmodule TedDansen do 2 | @moduledoc "Stuff" 3 | use Spark.Test.Contact, fragments: [Spark.Test.Contact.TedDansenFragment] 4 | 5 | alias Foo.Bar, as: Bar 6 | alias Foo.Bar, as: Buz 7 | 8 | contact do 9 | module(Bar.Baz) 10 | module(Buz) 11 | end 12 | 13 | personal_details do 14 | first_name("Ted") 15 | 16 | last_name("Dansen") 17 | end 18 | end 19 | -------------------------------------------------------------------------------- /test/support/contact/verifiers/verify_not_gandalf.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Contact.Verifiers.VerifyNotGandalf do 2 | @moduledoc false 3 | use Spark.Dsl.Verifier 4 | alias Spark.Dsl.Verifier 5 | 6 | def verify(dsl) do 7 | if Spark.Test.Contact.Info.first_name(dsl) == "Gandalf" do 8 | {:error, 9 | Spark.Error.DslError.exception( 10 | message: "Cannot be gandalf", 11 | path: [:personal_details, :first_name], 12 | module: Verifier.get_persisted(dsl, :module) 13 | )} 14 | else 15 | :ok 16 | end 17 | end 18 | end 19 | -------------------------------------------------------------------------------- 
/test/support/example_contacter.ex: -------------------------------------------------------------------------------- 1 | defmodule ExampleContacter do 2 | @moduledoc false 3 | @behaviour Spark.Test.Contact.Contacter 4 | def contact(_, _), do: {:ok, "contacted"} 5 | end 6 | 7 | defmodule Spark.Test.ContacterBuiltins do 8 | @moduledoc false 9 | def example, do: {ExampleContacter, []} 10 | end 11 | -------------------------------------------------------------------------------- /test/support/example_options.ex: -------------------------------------------------------------------------------- 1 | defmodule ExampleOptions do 2 | @moduledoc false 3 | @schema [ 4 | option: [ 5 | type: :string, 6 | doc: "An option" 7 | ] 8 | ] 9 | 10 | @doc "Some explanation" 11 | @doc spark_opts: [{2, @schema}] 12 | def func(a, b, opts \\ []) do 13 | with {:ok, opts} <- Spark.Options.validate(opts, @schema) do 14 | {a, b, opts} 15 | end 16 | end 17 | end 18 | -------------------------------------------------------------------------------- /test/support/info/my_extension.ex: -------------------------------------------------------------------------------- 1 | defmodule MyExtension do 2 | @moduledoc false 3 | 4 | @my_section %Spark.Dsl.Section{ 5 | name: :my_section, 6 | schema: [ 7 | map_option: [ 8 | type: :map, 9 | required: false, 10 | default: %{} 11 | ] 12 | ], 13 | entities: [] 14 | } 15 | use Spark.Dsl.Extension, sections: [@my_section] 16 | end 17 | -------------------------------------------------------------------------------- /test/support/info/my_extension_info.ex: -------------------------------------------------------------------------------- 1 | defmodule MyExtension.Info do 2 | @moduledoc false 3 | use Spark.InfoGenerator, extension: MyExtension, sections: [:my_section] 4 | end 5 | -------------------------------------------------------------------------------- /test/support/recursive/atom.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Atom do 2 | @moduledoc false 3 | defstruct [:name] 4 | end 5 | -------------------------------------------------------------------------------- /test/support/recursive/info.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Recursive.Info do 2 | @moduledoc false 3 | use Spark.InfoGenerator, extension: Spark.Test.Recursive.Dsl, sections: [:steps] 4 | end 5 | -------------------------------------------------------------------------------- /test/support/recursive/recursive.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Recursive do 2 | @moduledoc false 3 | defmodule Dsl do 4 | @moduledoc false 5 | @atom %Spark.Dsl.Entity{ 6 | name: :atom, 7 | target: Spark.Test.Atom, 8 | args: [:name], 9 | schema: [ 10 | name: [ 11 | type: :atom, 12 | required: true 13 | ] 14 | ] 15 | } 16 | 17 | @step %Spark.Dsl.Entity{ 18 | name: :step, 19 | target: Spark.Test.Step, 20 | args: [:name], 21 | recursive_as: :steps, 22 | entities: [ 23 | steps: [@atom] 24 | ], 25 | schema: [ 26 | name: [ 27 | type: :atom, 28 | required: true 29 | ], 30 | number: [ 31 | type: :integer 32 | ] 33 | ] 34 | } 35 | 36 | @special_step %Spark.Dsl.Entity{ 37 | name: :special_step, 38 | target: Spark.Test.Step, 39 | args: [:name], 40 | recursive_as: :steps, 41 | entities: [ 42 | steps: [@atom] 43 | ], 44 | schema: [ 45 | name: [ 46 | type: :atom, 47 | required: true 48 | ], 49 | number: [ 50 | type: :integer 51 | ] 52 | ] 53 | } 54 | 
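  # `recursive_as: :steps` on @step and @special_step above is what makes this
  # DSL recursive: a step may appear inside another step (or special_step), and
  # nested children are collected into the target struct's `steps` key alongside
  # the explicitly listed @atom entities.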
55 | @steps %Spark.Dsl.Section{ 56 | name: :steps, 57 | entities: [@step, @special_step, @atom] 58 | } 59 | 60 | use Spark.Dsl.Extension, 61 | sections: [@steps] 62 | end 63 | 64 | use Spark.Dsl, default_extensions: [extensions: Dsl] 65 | end 66 | -------------------------------------------------------------------------------- /test/support/recursive/step.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.Step do 2 | @moduledoc false 3 | defstruct [:name, :number, :__identifier__, steps: []] 4 | end 5 | -------------------------------------------------------------------------------- /test/support/top_level/info.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.TopLevel.Info do 2 | @moduledoc false 3 | use Spark.InfoGenerator, extension: Spark.Test.TopLevel.Dsl, sections: [:steps] 4 | end 5 | -------------------------------------------------------------------------------- /test/support/top_level/top_level.ex: -------------------------------------------------------------------------------- 1 | defmodule Spark.Test.TopLevel do 2 | @moduledoc false 3 | defmodule Dsl do 4 | @moduledoc false 5 | @atom %Spark.Dsl.Entity{ 6 | name: :atom, 7 | target: Spark.Test.Atom, 8 | args: [:name], 9 | schema: [ 10 | name: [ 11 | type: :atom, 12 | required: true 13 | ] 14 | ] 15 | } 16 | 17 | @step %Spark.Dsl.Entity{ 18 | name: :step, 19 | target: Spark.Test.Step, 20 | args: [:name], 21 | recursive_as: :steps, 22 | entities: [ 23 | steps: [] 24 | ], 25 | schema: [ 26 | name: [ 27 | type: :atom, 28 | required: true 29 | ], 30 | number: [ 31 | type: :integer 32 | ] 33 | ] 34 | } 35 | 36 | @special_step %Spark.Dsl.Entity{ 37 | name: :special_step, 38 | target: Spark.Test.Step, 39 | args: [:name], 40 | recursive_as: :steps, 41 | entities: [ 42 | steps: [] 43 | ], 44 | schema: [ 45 | name: [ 46 | type: :atom, 47 | required: true 48 | ], 49 | number: [ 50 | type: :integer 51 | ] 52 | ] 53 | } 54 | 55 | @nested_section %Spark.Dsl.Section{ 56 | name: :nested_section, 57 | schema: [ 58 | bar: [ 59 | type: :integer, 60 | doc: "Some documentation" 61 | ] 62 | ] 63 | } 64 | 65 | @steps %Spark.Dsl.Section{ 66 | name: :steps, 67 | top_level?: true, 68 | sections: [ 69 | @nested_section 70 | ], 71 | schema: [ 72 | foo: [ 73 | type: :integer, 74 | doc: "Some documentation" 75 | ] 76 | ], 77 | entities: [@step, @special_step, @atom] 78 | } 79 | 80 | use Spark.Dsl.Extension, 81 | sections: [@steps] 82 | end 83 | 84 | use Spark.Dsl, default_extensions: [extensions: Dsl] 85 | end 86 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start() 2 | -------------------------------------------------------------------------------- /test/top_level_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Spark.TopLevelTest do 2 | use ExUnit.Case 3 | 4 | alias Spark.Test.TopLevel.Info 5 | 6 | test "top level DSL entities are available" do 7 | defmodule Simple do 8 | use Spark.Test.TopLevel 9 | 10 | foo(10) 11 | 12 | step :foo do 13 | step(:bar) 14 | end 15 | end 16 | 17 | assert [%Spark.Test.Step{name: :foo, steps: [%Spark.Test.Step{}]}] = Info.steps(Simple) 18 | assert {:ok, 10} = Info.steps_foo(Simple) 19 | end 20 | 21 | test "nested DSL sections are available" do 22 | defmodule Nested do 23 | use Spark.Test.TopLevel 24 | 25 | 
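      # `top_level?: true` on the :steps section lets `foo` and `step` be written
      # directly in the module body below, while a nested section such as
      # `nested_section` keeps its own do-block.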
nested_section do 26 | bar(20) 27 | end 28 | 29 | foo(10) 30 | 31 | step :foo do 32 | step(:bar) 33 | end 34 | end 35 | 36 | assert [%Spark.Test.Step{name: :foo, steps: [%Spark.Test.Step{}]}] = Info.steps(Nested) 37 | assert {:ok, 10} = Info.steps_foo(Nested) 38 | assert {:ok, 20} = Info.steps_nested_section_bar(Nested) 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /test/transformer_test.exs: -------------------------------------------------------------------------------- 1 | defmodule TransformerTest do 2 | use ExUnit.Case 3 | 4 | test "build_entity respects auto_set_fields" do 5 | defmodule Entity do 6 | defstruct [:set_automatically, :set_manually, :__identifier__] 7 | end 8 | 9 | defmodule Dsl do 10 | @entity %Spark.Dsl.Entity{ 11 | name: :entity, 12 | target: Entity, 13 | schema: [ 14 | set_automatically: [ 15 | type: :boolean 16 | ], 17 | set_manually: [ 18 | type: :boolean 19 | ] 20 | ], 21 | auto_set_fields: [set_automatically: true] 22 | } 23 | 24 | @section %Spark.Dsl.Section{ 25 | name: :section, 26 | entities: [ 27 | @entity 28 | ] 29 | } 30 | 31 | use Spark.Dsl.Extension, sections: [@section] 32 | use Spark.Dsl, default_extensions: [extensions: Dsl] 33 | end 34 | 35 | {:ok, entity} = Spark.Dsl.Transformer.build_entity(Dsl, [:section], :entity, []) 36 | 37 | assert entity.set_automatically == true 38 | assert entity.set_manually == nil 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /usage-rules.md: -------------------------------------------------------------------------------- 1 | # Rules for working with Spark 2 | 3 | ## Overview 4 | 5 | Spark is a framework for building Domain-Specific Languages (DSLs) in Elixir. It enables developers to create declarative DSLs with minimal boilerplate. 6 | 7 | ## Core Architecture 8 | 9 | ### 1. Entity-Section-Extension Model 10 | 11 | Spark DSLs are built using three core components: 12 | 13 | ```elixir 14 | # 1. Entities - Individual DSL constructors 15 | @field %Spark.Dsl.Entity{ 16 | name: :field, # DSL function name 17 | target: MyApp.Field, # Struct to build 18 | args: [:name, :type], # Positional arguments 19 | schema: [ # Validation schema 20 | name: [type: :atom, required: true], 21 | type: [type: :atom, required: true] 22 | ] 23 | } 24 | 25 | # 2. Sections - Organize related entities 26 | @fields %Spark.Dsl.Section{ 27 | name: :fields, 28 | entities: [@field], # Contains entities 29 | sections: [], # Can nest sections 30 | schema: [ # Section-level options 31 | required: [type: {:list, :atom}, default: []] 32 | ] 33 | } 34 | 35 | # 3. Extensions - Package DSL functionality 36 | defmodule MyExtension do 37 | use Spark.Dsl.Extension, 38 | sections: [@fields], 39 | transformers: [MyTransformer], 40 | verifiers: [MyVerifier] 41 | end 42 | ``` 43 | 44 | ### 2. Compile-Time Processing Pipeline 45 | 46 | ``` 47 | DSL Definition → Parsing → Validation → Transformation → Verification → Code Generation 48 | ``` 49 | 50 | 1. **Parsing**: DSL syntax converted to internal representation 51 | 2. **Validation**: Schema validation via `Spark.Options` 52 | 3. **Transformation**: Transformers modify DSL state 53 | 4. **Verification**: Verifiers validate final state 54 | 5.
**Code Generation**: Generate runtime modules/functions 55 | 56 | ## Creating a DSL with Spark 57 | 58 | ### Basic DSL Structure 59 | 60 | ```elixir 61 | # Step 1: Define your extension 62 | defmodule MyLibrary.Dsl do 63 | @my_nested_entity %Spark.Dsl.Entity{ 64 | name: :my_nested_entity, 65 | target: MyLibrary.MyNestedEntity, 66 | describe: """ 67 | Describe the entity. 68 | """, 69 | examples: [ 70 | "my_nested_entity :name, option: \"something\"", 71 | """ 72 | my_nested_entity :name do 73 | option "something" 74 | end 75 | """ 76 | ], 77 | args: [:name], 78 | schema: [ 79 | name: [type: :atom, required: true, doc: "The name of the entity"], 80 | option: [type: :string, default: "default", doc: "An option that does X"] 81 | ] 82 | } 83 | 84 | @my_entity %Spark.Dsl.Entity{ 85 | name: :my_entity, 86 | target: MyLibrary.MyEntity, 87 | args: [:name], 88 | entities: [ 89 | entities: [@my_nested_entity] 90 | ], 91 | describe: ..., 92 | examples: [...], 93 | schema: [ 94 | name: [type: :atom, required: true, doc: "The name of the entity"], 95 | option: [type: :string, default: "default", doc: "An option that does X"] 96 | ] 97 | } 98 | 99 | @my_section %Spark.Dsl.Section{ 100 | name: :my_section, 101 | describe: ..., 102 | examples: [...], 103 | schema: [ 104 | section_option: [type: :string] 105 | ], 106 | entities: [@my_entity] 107 | } 108 | 109 | use Spark.Dsl.Extension, sections: [@my_section] 110 | end 111 | 112 | # Entity Targets 113 | defmodule MyLibrary.MyEntity do 114 | defstruct [:name, :option, :entities] 115 | end 116 | 117 | defmodule MyLibrary.MyNestedEntity do 118 | defstruct [:name, :option] 119 | end 120 | 121 | # Step 2: Create the DSL module 122 | defmodule MyLibrary do 123 | use Spark.Dsl, 124 | default_extensions: [ 125 | extensions: [MyLibrary.Dsl] 126 | ] 127 | end 128 | 129 | # Step 3: Use the DSL 130 | defmodule MyApp.Example do 131 | use MyLibrary 132 | 133 | my_section do 134 | my_entity :example_name do 135 | option "custom value" 136 | my_nested_entity :nested_name do 137 | option "nested custom value" 138 | end 139 | end 140 | end 141 | end 142 | ``` 143 | 144 | ### Working with Transformers 145 | 146 | Transformers modify the DSL at compile time: 147 | 148 | ```elixir 149 | defmodule MyLibrary.Transformers.AddDefaults do 150 | use Spark.Dsl.Transformer 151 | 152 | def transform(dsl_state) do 153 | # Add a default entity if none exist 154 | entities = Spark.Dsl.Extension.get_entities(dsl_state, [:my_section]) 155 | 156 | if Enum.empty?(entities) do 157 | {:ok, 158 | Spark.Dsl.Transformer.add_entity( 159 | dsl_state, 160 | [:my_section], 161 | %MyLibrary.MyEntity{name: :default} 162 | )} 163 | else 164 | {:ok, dsl_state} 165 | end 166 | end 167 | 168 | # Control execution order 169 | def after?(_), do: false 170 | def before?(OtherTransformer), do: true 171 | def before?(_), do: false 172 | end 173 | ``` 174 | 175 | ### Creating Verifiers 176 | 177 | Verifiers validate the final DSL state: 178 | 179 | ```elixir 180 | defmodule MyLibrary.Verifiers.UniqueNames do 181 | use Spark.Dsl.Verifier 182 | 183 | def verify(dsl_state) do 184 | entities = Spark.Dsl.Extension.get_entities(dsl_state, [:my_section]) 185 | names = Enum.map(entities, & &1.name) 186 | 187 | if length(names) == length(Enum.uniq(names)) do 188 | :ok 189 | else 190 | {:error, 191 | Spark.Error.DslError.exception( 192 | message: "Entity names must be unique", 193 | path: [:my_section], 194 | module: Spark.Dsl.Verifier.get_persisted(dsl_state, :module) 195 | )} 196 | end 197 | end 198 | end 199 | ``` 200
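Transformers and verifiers take effect only once they are registered on an extension, as in the `MyExtension` sketch under Core Architecture. A minimal sketch wiring the two examples above into `MyLibrary.Dsl` (all module and section names as defined earlier in this document):

```elixir
defmodule MyLibrary.Dsl do
  # ... @my_nested_entity, @my_entity, and @my_section as defined above ...

  use Spark.Dsl.Extension,
    sections: [@my_section],
    transformers: [MyLibrary.Transformers.AddDefaults],
    verifiers: [MyLibrary.Verifiers.UniqueNames]
end
```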
| 201 | ## Info Modules 202 | 203 | Spark provides an Info Generator system for introspection of DSL-defined modules. All introspection should go through info modules rather than accessing DSL state directly. 204 | 205 | ### 1. Info Module Generation 206 | ```elixir 207 | # Define an info module for your extension 208 | defmodule MyLibrary.Info do 209 | use Spark.InfoGenerator, 210 | extension: MyLibrary.Dsl, 211 | sections: [:my_section] 212 | end 213 | 214 | # The info generator creates accessor functions for DSL data: 215 | # - MyLibrary.Info.my_section_entities(module) 216 | # - MyLibrary.Info.my_section_option!(module, :option_name) 217 | # - MyLibrary.Info.my_section_option(module, :option_name, default) 218 | 219 | # Example usage: 220 | entities = MyLibrary.Info.my_section_entities(MyApp.Example) 221 | required_option = MyLibrary.Info.my_section_option!(MyApp.Example, :required) 222 | {:ok, optional_value} = MyLibrary.Info.my_section_option(MyApp.Example, :optional, "default") 223 | 224 | ``` 225 | 226 | Benefits of using info modules: 227 | - **Type Safety**: Generated functions provide compile-time guarantees 228 | - **Performance**: Info data is cached and optimized for runtime access 229 | - **Consistency**: Standardized API across all Spark-based libraries 230 | - **Documentation**: Auto-generated docs for introspection functions 231 | 232 | ## Key APIs for Development 233 | 234 | ### Essential Functions 235 | 236 | ```elixir 237 | # Get entities from a section 238 | entities = Spark.Dsl.Extension.get_entities(dsl_state, [:section_path]) 239 | 240 | # Get section options 241 | option = Spark.Dsl.Extension.get_opt(dsl_state, [:section_path], :option_name, default_value) 242 | 243 | # Add entities during transformation 244 | dsl_state = Spark.Dsl.Transformer.add_entity(dsl_state, [:section_path], entity) 245 | 246 | # Persist data across transformers 247 | dsl_state = Spark.Dsl.Transformer.persist(dsl_state, :key, value) 248 | value = Spark.Dsl.Transformer.get_persisted(dsl_state, :key) 249 | 250 | # Get current module being compiled 251 | module = Spark.Dsl.Verifier.get_persisted(dsl_state, :module) 252 | ``` 253 | 254 | ### Schema Definition with Spark.Options 255 | 256 | ```elixir 257 | schema = [ 258 | required_field: [ 259 | type: :atom, 260 | required: true, 261 | doc: "Documentation for the field" 262 | ], 263 | optional_field: [ 264 | type: {:list, :string}, 265 | default: [], 266 | doc: "Optional field with default" 267 | ], 268 | complex_field: [ 269 | type: {:or, [:atom, :string, {:struct, MyStruct}]}, 270 | required: false 271 | ] 272 | ] 273 | ``` 274 | 275 | ## Important Implementation Details 276 | 277 | ### 1. Compile-Time vs Runtime 278 | 279 | - **DSL processing happens at compile time** 280 | - Transformers and verifiers run during compilation 281 | - Generated code is optimized for runtime performance 282 | - Use `persist/3` to cache expensive computations 283 | 284 | ### 2. Error Handling 285 | 286 | Always provide context in errors: 287 | ```elixir 288 | {:error, 289 | Spark.Error.DslError.exception( 290 | message: "Clear error message", 291 | path: [:section, :subsection], # DSL path 292 | module: module # Module being compiled 293 | )} 294 | ``` 295 | 296 | ## Common Gotchas 297 | 298 | ### 1. Compilation Deadlocks 299 | ```elixir 300 | # WRONG - Causes deadlock 301 | def transform(dsl_state) do 302 | module = get_persisted(dsl_state, :module) 303 | module.some_function() # Module isn't compiled yet!
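  # (Transformers run while `module` is still being compiled, so its functions
  # are not yet available; calling one forces compilation to wait on itself.)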
304 | end 305 | 306 | # RIGHT - Use DSL state 307 | def transform(dsl_state) do 308 | entities = get_entities(dsl_state, [:section]) 309 | # Work with DSL state, not module functions 310 | end 311 | ``` 312 | 313 | ### 2. Extension Order 314 | - Extensions are processed in order 315 | - Later extensions can modify earlier ones 316 | - Use transformer ordering for dependencies 317 | 318 | ## Performance Considerations 319 | 320 | 1. **Compilation Performance** 321 | - Heavy transformers slow compilation 322 | - Cache expensive computations with `persist/3` 323 | - Use verifiers instead of transformers when possible 324 | 325 | 2. **Runtime Performance** 326 | - DSL processing has zero runtime overhead 327 | - Generated code is optimized 328 | - Info modules cache DSL data efficiently 329 | 330 | 3. **Memory Usage** 331 | - DSL state is cleaned up after compilation 332 | - Runtime footprint is minimal 333 | - Use structs efficiently in entities 334 | 335 | ## Summary 336 | 337 | Spark enables: 338 | - Clean, declarative DSL syntax 339 | - Compile-time validation and transformation 340 | - Extensible architecture 341 | - Excellent developer experience 342 | 343 | When coding with Spark: 344 | 1. Think in terms of entities, sections, and extensions 345 | 2. Leverage compile-time processing for validation 346 | 3. Use transformers for complex logic 347 | 4. Test DSLs thoroughly 348 | 5. Provide clear error messages 349 | 6. Document your DSLs well 350 | 351 | --------------------------------------------------------------------------------