├── .dockerignore ├── .formatter.exs ├── .github └── workflows │ └── main.yml ├── .gitignore ├── .tool-versions ├── .tx └── config ├── LICENSE ├── README.md ├── config ├── bonfire_valueflows.exs └── config.exs ├── deps.git ├── deps.hex ├── justfile ├── lib ├── agent │ ├── agent_graphql.ex │ ├── agents.ex │ ├── organizations.ex │ └── people.ex ├── agreement │ ├── agreement.ex │ └── graphql.ex ├── all_migrations.ex ├── appreciation │ └── graphql.ex ├── claim │ ├── claim.ex │ ├── claim_queries.ex │ ├── claims.ex │ ├── graphql.ex │ └── migrations.ex ├── economic_event │ ├── event.ex │ ├── event_queries.ex │ ├── event_side_effects.ex │ ├── events.ex │ ├── graphql.ex │ ├── migrations.ex │ ├── trace.ex │ └── track.ex ├── economic_resource │ ├── graphql.ex │ ├── migrations.ex │ ├── resource.ex │ ├── resource_queries.ex │ └── resources.ex ├── hydration.ex ├── knowledge │ ├── action │ │ ├── action.ex │ │ ├── actions.ex │ │ └── graphql.ex │ ├── process_specification │ │ ├── graphql.ex │ │ ├── migrations.ex │ │ ├── process_spec.ex │ │ ├── process_spec_queries.ex │ │ └── process_specs.ex │ └── resource_specification │ │ ├── graphql.ex │ │ ├── migrations.ex │ │ ├── resource_spec.ex │ │ ├── resource_spec_queries.ex │ │ └── resource_specs.ex ├── plan │ └── graphql.ex ├── planning │ ├── commitment │ │ ├── commitment.ex │ │ ├── commitment_queries.ex │ │ ├── commitments.ex │ │ ├── graphql.ex │ │ └── migrations.ex │ ├── intent │ │ ├── intent.ex │ │ ├── intent_graphql.ex │ │ ├── intent_queries.ex │ │ ├── intents.ex │ │ └── migrations.ex │ └── satisfaction │ │ ├── graphql.ex │ │ ├── migrations.ex │ │ ├── satisfaction.ex │ │ ├── satisfaction_queries.ex │ │ └── satisfactions.ex ├── process │ ├── graphql.ex │ ├── migrations.ex │ ├── process.ex │ ├── process_queries.ex │ └── processes.ex ├── proposal │ ├── _attempts.ex │ ├── migrations.ex │ ├── proposal.ex │ ├── proposal_graphql.ex │ ├── proposal_queries.ex │ ├── proposals.ex │ ├── proposed_intent.ex │ ├── proposed_intent_graphql.ex │ ├── proposed_intent_queries.ex │ ├── proposed_intents.ex │ ├── proposed_to.ex │ ├── proposed_to_graphql.ex │ ├── proposed_to_queries.ex │ └── proposed_tos.ex ├── recipe │ └── graphql.ex ├── scenario │ └── graphql.ex ├── schemas ├── util │ ├── common-data.ex.wip │ ├── federation.ex │ ├── graphql.ex │ ├── simulate.ex │ └── util.ex ├── value_calculation │ ├── graphql.ex │ ├── migrations.ex │ ├── value_calc_queries.ex │ ├── value_calculation.ex │ └── value_calculations.ex └── value_flows.ex ├── mess.exs ├── mix.exs ├── priv ├── gettext │ └── en │ │ └── LC_MESSAGES │ │ └── errors.po ├── localisation │ └── bonfire_valueflows.pot └── repo │ ├── migrations │ ├── .formatter.exs │ ├── 20200523081012_init_pointers.exs │ ├── 20201205095039_import_valueflows.exs │ └── 20211112094942_import_commitment_satisfaction.exs │ └── seeds.exs ├── test ├── action │ ├── action_graphql_test.exs │ └── actions_test.exs ├── agent │ └── person │ │ └── person_graphql_test.exs ├── claim │ ├── claim_graphql_test.exs │ └── claims_test.exs ├── economic_event │ ├── event_federate_remote_test.exs │ ├── event_federate_test.exs │ ├── events_graphql_test.exs │ ├── events_resources_graphql_test.exs │ ├── events_resources_test.exs │ └── events_test.exs ├── economic_resource │ ├── resource_graphql_test.exs │ └── resources_test.exs ├── planning │ ├── intent_federate.exs │ ├── intent_graphql_test.exs │ └── intents_test.exs ├── process │ ├── process_federate.exs │ ├── process_graphql_test.exs │ └── processes_test.exs ├── process_specification │ ├── process_spec_federate.exs │ ├── 
process_spec_graphql_test.exs │ └── process_specs_test.exs ├── proposal │ ├── proposal_federate.exs │ ├── proposal_graphql_test.exs │ ├── proposals_test.exs │ ├── proposed_intent_graphql_test.exs │ └── proposed_to_graphql_test.exs ├── resource_specification │ ├── resource_spec_federate.exs │ ├── resource_spec_graphql_test.exs │ └── resource_specs_test.exs ├── support │ ├── channel_case.ex │ ├── conn_case.ex │ ├── conn_helpers.ex │ ├── data_case.ex │ ├── data_helpers.ex │ └── valueflows_faking.ex ├── test_helper.exs ├── track_trace │ ├── events_track_trace_graphql_test.exs │ ├── events_track_trace_test.exs │ ├── process_track_trace_graphql_test.exs │ ├── process_track_trace_test.exs │ ├── resource_track_trace_graphql_test.exs │ ├── resource_track_trace_test.exs │ ├── track_and_trace_graphql_test.exs │ └── track_and_trace_test.exs └── value_calculation │ ├── events_value_calculation_test.exs │ ├── graphql_test.exs │ └── value_calculations_test.exs ├── tool-versions-to-env.sh └── xref_graph.dot /.dockerignore: -------------------------------------------------------------------------------- 1 | _build 2 | deps 3 | docs/exdoc 4 | .hex 5 | .mix 6 | .npm 7 | .git 8 | .gitignore 9 | .config 10 | Dockerfile 11 | Makefile 12 | README* 13 | test 14 | priv/static 15 | assets/node_modules 16 | data/search 17 | data/postgres -------------------------------------------------------------------------------- /.formatter.exs: -------------------------------------------------------------------------------- 1 | [ 2 | import_deps: [:ecto, :phoenix], 3 | inputs: ["*.{ex,exs}", "priv/*/seeds.exs", "{config,lib,test}/**/*.{ex,exs}"], 4 | subdirectories: ["priv/*/migrations"] 5 | ] 6 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: Main branch 2 | 3 | on: 4 | push: 5 | branches: 6 | - "main" 7 | paths-ignore: 8 | - '.envrc' 9 | - '.gitignore' 10 | - README.* 11 | - LICENSE 12 | pull_request: 13 | paths-ignore: 14 | - '.envrc' 15 | - '.gitignore' 16 | - README.* 17 | - LICENSE 18 | # Allows you to run this workflow manually from the Actions tab 19 | workflow_dispatch: 20 | 21 | permissions: 22 | contents: write 23 | pull-requests: write 24 | 25 | # Stop previous jobs if they are still running. 26 | # https://docs.github.com/en/actions/learn-github-actions/expressions 27 | # https://docs.github.com/en/actions/learn-github-actions/contexts#github-context 28 | concurrency: 29 | # Use github.run_id on main branch 30 | # Use github.event.pull_request.number on pull requests, so it's unique per pull request 31 | # Use github.ref on other branches, so it's unique per branch 32 | group: ${{ github.workflow }}-${{ github.ref == 'refs/heads/main' && github.run_id || github.event.pull_request.number || github.ref }} 33 | cancel-in-progress: true 34 | 35 | jobs: 36 | build_and_test: 37 | name: Prepare Localisation, Test, Lint 38 | if: ${{ !contains(github.ref, 'update_translations') }} 39 | runs-on: ubuntu-latest 40 | 41 | steps: 42 | 43 | - name: Run CI 44 | uses: bonfire-networks/bonfire-extension-ci-action@latest 45 | with: 46 | tx-token: ${{ secrets.TX_TOKEN || vars.TX_TOKEN }} 47 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # The directory Mix will write compiled artifacts to. 
2 | /_build/ 3 | 4 | # If you run "mix test --cover", coverage assets end up here. 5 | /cover/ 6 | 7 | # The directories Mix and npm download your dependencies to. 8 | /deps/ 9 | /.cache/ 10 | /.hex/ 11 | /.mix/ 12 | /.npm/ 13 | /.config/ 14 | 15 | # Where 3rd-party dependencies like ExDoc output generated docs. 16 | /doc/ 17 | 18 | # Ignore .fetch files in case you like to edit your project deps locally. 19 | /.fetch 20 | 21 | # If the VM crashes, it generates a dump, let's ignore it too. 22 | erl_crash.dump 23 | 24 | # Also ignore archive artifacts (built via "mix archive.build"). 25 | *.ez 26 | 27 | # Ignore package tarball (built via "mix hex.build"). 28 | bonfire_valueflows-*.tar 29 | 30 | # If NPM crashes, it generates a log, let's ignore it too. 31 | npm-debug.log 32 | 33 | # The directory NPM downloads your dependencies sources to. 34 | /assets/node_modules/ 35 | 36 | # Since we are building assets from assets/ 37 | /priv/static/ 38 | 39 | # App and user data 40 | /data/ 41 | 42 | # user-local overrides for mess 43 | deps.path 44 | /libs/ 45 | /forks 46 | 47 | .\#* -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | erlang 27.3.4 2 | elixir 1.18.4-otp-27 3 | just 1.40.0 4 | transifex latest 5 | -------------------------------------------------------------------------------- /.tx/config: -------------------------------------------------------------------------------- 1 | [main] 2 | host = https://app.transifex.com 3 | 4 | [o:bonfire:p:bonfire:r:bonfire_valueflows] 5 | file_filter = priv/localisation//LC_MESSAGES/bonfire_valueflows.po 6 | source_file = priv/localisation/bonfire_valueflows.pot 7 | type = PO 8 | minimum_perc = 0 9 | replace_edited_strings = false 10 | keep_translations = false 11 | 12 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ValueFlows 2 | 3 | `ValueFlows` is a [Bonfire](https://bonfire.cafe/) extension that implements [ValueFlows](https://valueflo.ws/) for federated networks to conduct economic activities. 4 | 5 | Implemented so far: 6 | 7 | - Intent 8 | - Proposal 9 | - Economic Event 10 | - Types of Action 11 | - Resource Specification 12 | - Economic Resource 13 | - Process Specification 14 | - Process 15 | - Claim (WIP) 16 | - Value Calculations (WIP) 17 | 18 | As well as an optional GraphQL API for all of the above... 19 | 20 | 21 | ## Handy commands 22 | 23 | ## Copyright and License 24 | 25 | Copyright (c) 2021 ValueFlows, CommonsPub, Reflow & Bonfire Contributors 26 | 27 | This program is free software: you can redistribute it and/or modify 28 | it under the terms of the GNU Affero General Public License as 29 | published by the Free Software Foundation, either version 3 of the 30 | License, or (at your option) any later version. 31 | 32 | This program is distributed in the hope that it will be useful, but 33 | WITHOUT ANY WARRANTY; without even the implied warranty of 34 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU 35 | Affero General Public License for more details. 36 | 37 | You should have received a copy of the GNU Affero General Public 38 | License along with this program. If not, see . 
39 | -------------------------------------------------------------------------------- /config/bonfire_valueflows.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | config :bonfire_valueflows, 4 | valid_agent_schemas: [Bonfire.Data.Identity.User] 5 | -------------------------------------------------------------------------------- /config/config.exs: -------------------------------------------------------------------------------- 1 | import Config 2 | 3 | #### Email configuration 4 | 5 | # You will almost certainly want to change at least some of these 6 | 7 | # include Phoenix web server boilerplate 8 | # import_config "bonfire_web_phoenix.exs" 9 | 10 | # include all used Bonfire extensions 11 | import_config "bonfire_valueflows.exs" 12 | 13 | #### Basic configuration 14 | 15 | # You probably won't want to touch these. You might override some in 16 | # other config files. 17 | 18 | config :logger, :console, 19 | format: "$time $metadata[$level] $message\n", 20 | metadata: [:request_id] 21 | 22 | config :mime, :types, %{ 23 | "application/activity+json" => ["activity+json"] 24 | } 25 | 26 | # import_config "#{Mix.env()}.exs" 27 | -------------------------------------------------------------------------------- /deps.git: -------------------------------------------------------------------------------- 1 | bonfire_valueflows_api_schema = "https://github.com/bonfire-networks/bonfire_valueflows_api_schema" 2 | bonfire_common = "https://github.com/bonfire-networks/bonfire_common" 3 | bonfire_fail = "https://github.com/bonfire-networks/bonfire_fail" 4 | 5 | # bonfire_web_phoenix = "https://github.com/bonfire-networks/bonfire_web_phoenix" 6 | bonfire_quantify = "https://github.com/bonfire-networks/bonfire_quantify" 7 | bonfire_geolocate = "https://github.com/bonfire-networks/bonfire_geolocate" 8 | bonfire_tag = "https://github.com/bonfire-networks/bonfire_tag" 9 | 10 | -------------------------------------------------------------------------------- /deps.hex: -------------------------------------------------------------------------------- 1 | ecto_sql = "~> 3.8" 2 | faker = "~> 0.14" # fake data generation 3 | gettext = "~> 0.20" 4 | jason = "~> 1.0" 5 | postgrex = ">= 0.0.0" 6 | telemetry_metrics = "~> 1.0" 7 | telemetry_poller = "~> 1.1" 8 | # for this extension: 9 | formula2 = "~> 0.1" 10 | decimal = "~> 1.6 or ~> 2.0" 11 | untangle = "~> 0.3.0" 12 | arrows = "~> 0.2" 13 | needle = "~> 0.7" 14 | -------------------------------------------------------------------------------- /lib/agent/agent_graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled do 3 | defmodule ValueFlows.Agent.GraphQL do 4 | alias Bonfire.API.GraphQL 5 | 6 | import Untangle 7 | 8 | # use Absinthe.Schema.Notation 9 | # import_sdl path: "lib/value_flows/graphql/schemas/agent.gql" 10 | 11 | # fake data 12 | # def all_agents(_, _) do 13 | # {:ok, long_list(&Simulate.agent/0)} 14 | # end 15 | 16 | # def agent(%{id: id}, info) do 17 | # {:ok, Simulate.agent()} 18 | # end 19 | # proper resolvers 20 | 21 | # with pagination 22 | def people(page_opts, info) do 23 | people_pages = 24 | if Bonfire.Common.Extend.module_enabled?(CommonsPub.Web.GraphQL.UsersResolver) do 25 | with {:ok, users_pages} <- 26 | CommonsPub.Web.GraphQL.UsersResolver.users(page_opts, info) do 27 | people = 28 | Enum.map( 29 | users_pages.edges, 30 
| &(&1 31 | |> ValueFlows.Agent.Agents.character_to_agent()) 32 | ) 33 | 34 | %{users_pages | edges: people} 35 | end 36 | else 37 | people = 38 | ValueFlows.Agent.People.people(nil) 39 | |> Enum.map( 40 | &(&1 41 | |> ValueFlows.Agent.Agents.character_to_agent()) 42 | ) 43 | 44 | %{ 45 | edges: people, 46 | page_info: nil, 47 | total_count: length(people) 48 | } 49 | end 50 | 51 | {:ok, people_pages} 52 | end 53 | 54 | # TODO: pagination 55 | def all_people(%{}, info) do 56 | {:ok, ValueFlows.Agent.People.people(Bonfire.API.GraphQL.current_user(info))} 57 | end 58 | 59 | def person(%{id: id}, info) do 60 | {:ok, 61 | ValueFlows.Agent.People.person( 62 | id, 63 | Bonfire.API.GraphQL.current_user(info) 64 | )} 65 | end 66 | 67 | # with pagination 68 | def organizations(page_opts, info) do 69 | orgz_pages = 70 | if Bonfire.Common.Extend.module_enabled?(Organisation.GraphQL.Resolver) do 71 | with {:ok, pages} <- 72 | Organisation.GraphQL.Resolver.organisations(page_opts, info) do 73 | orgz = 74 | Enum.map( 75 | pages.edges, 76 | &(&1 77 | |> ValueFlows.Agent.Agents.character_to_agent()) 78 | ) 79 | 80 | %{pages | edges: orgz} 81 | end 82 | else 83 | %{} 84 | end 85 | 86 | {:ok, orgz_pages} 87 | end 88 | 89 | # without pagination 90 | def all_organizations(%{}, info) do 91 | {:ok, ValueFlows.Agent.Organizations.organizations(Bonfire.API.GraphQL.current_user(info))} 92 | end 93 | 94 | def organization(%{id: id}, info) do 95 | {:ok, 96 | ValueFlows.Agent.Organizations.organization( 97 | id, 98 | Bonfire.API.GraphQL.current_user(info) 99 | )} 100 | end 101 | 102 | def all_agents(%{}, info) do 103 | {:ok, ValueFlows.Agent.Agents.agents(Bonfire.API.GraphQL.current_user(info))} 104 | end 105 | 106 | def agent(%{id: id}, info) do 107 | {:ok, ValueFlows.Agent.Agents.agent(id, Bonfire.API.GraphQL.current_user(info))} 108 | end 109 | 110 | def agent(_, info) do 111 | my_agent(nil, info) 112 | end 113 | 114 | def my_agent(_, info) do 115 | with {:ok, user} <- GraphQL.current_user_or_not_logged_in(info) do 116 | {:ok, ValueFlows.Agent.Agents.character_to_agent(user)} 117 | end 118 | end 119 | 120 | def mutate_person(_, _) do 121 | {:error, "Please use one of these instead: createUser, updateProfile, deleteSelf"} 122 | end 123 | 124 | def mutate_organization(_, _) do 125 | {:error, 126 | "Please use one of these instead (notice the spelling difference): createOrganisation, updateOrganisation, delete"} 127 | end 128 | end 129 | end 130 | -------------------------------------------------------------------------------- /lib/agent/agents.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Agent.Agents do 3 | # alias ValueFlows.{Simulate} 4 | import Untangle 5 | use Bonfire.Common.Utils 6 | import Bonfire.Common.Config, only: [repo: 0] 7 | 8 | # TODO - change approach to allow pagination 9 | def agents(signed_in_user) do 10 | orgs = ValueFlows.Agent.Organizations.organizations(signed_in_user) 11 | people = ValueFlows.Agent.People.people(signed_in_user) 12 | 13 | orgs ++ people 14 | end 15 | 16 | # FIXME - this works but isn't elegant 17 | def agent(id, signed_in_user) do 18 | case ValueFlows.Agent.People.person(id, signed_in_user) do 19 | {:error, _error} -> 20 | ValueFlows.Agent.Organizations.organization(id, signed_in_user) 21 | 22 | org -> 23 | org 24 | end 25 | end 26 | 27 | def agent_to_character(a) do 28 | a 29 | |> Enums.maybe_put(:summary, Map.get(a, :note)) 30 | |> Enums.maybe_put(:geolocation, 
Map.get(a, :primary_location)) 31 | end 32 | 33 | def character_to_agent(a) do 34 | # a = Bonfire.Common.Repo.maybe_preload(a, [icon: [:content], image: [:content]]) 35 | 36 | a 37 | |> repo().maybe_preload(:shared_user, label: __MODULE__) 38 | # |> IO.inspect() 39 | |> Enums.merge_structs_as_map( 40 | e(a, :profile, %{ 41 | name: e(a, :character, :username, "anonymous") 42 | }) 43 | ) 44 | |> Enums.merge_structs_as_map(e(a, :character, %{})) 45 | |> Map.put(:image, ValueFlows.Util.image_url(a)) 46 | |> Enums.maybe_put(:primary_location, agent_location(a)) 47 | |> Enums.maybe_put(:note, e(a, :profile, :summary, nil)) 48 | # |> Enums.maybe_put(:display_username, ValueFlows.Util.display_username(a)) 49 | |> add_type() 50 | |> debug() 51 | end 52 | 53 | def agent_location(%{profile_id: profile_id} = a) 54 | when not is_nil(profile_id) do 55 | repo().maybe_preload(a, profile: [:geolocation]) 56 | |> Map.get(:profile) 57 | |> agent_location() 58 | end 59 | 60 | def agent_location(%{geolocation_id: geolocation_id} = a) 61 | when not is_nil(geolocation_id) do 62 | repo().maybe_preload(a, :geolocation) 63 | |> Map.get(:geolocation) 64 | end 65 | 66 | def agent_location(_) do 67 | nil 68 | end 69 | 70 | # def add_type(%ValueFlows.Util.user_schema(){} = a) do 71 | # a 72 | # |> Map.put(:agent_type, :person) 73 | # end 74 | 75 | # def add_type(%ValueFlows.Util.org_schema(){} = a) do 76 | # a 77 | # |> Map.put(:agent_type, :organization) 78 | # end 79 | 80 | def add_type(a) do 81 | user_type = ValueFlows.Util.user_schema() 82 | org_type = ValueFlows.Util.org_schema() 83 | 84 | case a do 85 | # for SharedUser within a User 86 | %{shared_user: %{id: _}} -> 87 | Map.put(a, :agent_type, :organization) 88 | 89 | %{__struct__: user_type} -> 90 | Map.put(a, :agent_type, :person) 91 | 92 | %{__typename: user_type} -> 93 | Map.put(a, :agent_type, :person) 94 | 95 | %{__struct__: org_type} -> 96 | Map.put(a, :agent_type, :organization) 97 | 98 | _ -> 99 | Map.put(a, :agent_type, :person) 100 | end 101 | end 102 | 103 | def add_type(a) do 104 | Map.put(a, :agent_type, :person) 105 | end 106 | end 107 | -------------------------------------------------------------------------------- /lib/agent/organizations.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Agent.Organizations do 3 | # alias ValueFlows.Simulate 4 | import Untangle 5 | import Bonfire.Common.Config, only: [repo: 0] 6 | 7 | def organizations(signed_in_user) do 8 | if Bonfire.Common.Extend.module_enabled?(Organisation.Organisations) do 9 | with {:ok, orgs} = Organisation.Organisations.many([:default, user: signed_in_user]) do 10 | format(orgs) 11 | end 12 | else 13 | if Bonfire.Common.Extend.module_enabled?(Bonfire.Me.Users) do 14 | Bonfire.Me.Users.list(signed_in_user) |> format() 15 | else 16 | error("organizations feature not implemented") 17 | [] 18 | end 19 | end 20 | end 21 | 22 | defp format(orgs) when is_list(orgs), 23 | do: 24 | orgs 25 | |> repo().maybe_preload(:shared_user, label: __MODULE__) 26 | |> Enum.map(&format/1) 27 | |> Enum.reject(fn 28 | %{agent_type: :person} -> true 29 | _ -> false 30 | end) 31 | 32 | defp format(org) do 33 | ValueFlows.Agent.Agents.character_to_agent(org) 34 | end 35 | 36 | def organization(id, current_user) do 37 | if Bonfire.Common.Extend.module_enabled?(Organisation.Organisations) do 38 | with {:ok, org} = 39 | Organisation.Organisations.one([ 40 | :default, 41 | id: id, 42 | user: current_user 43 | ]) do 
44 | format(org) 45 | end 46 | else 47 | if Bonfire.Common.Extend.module_enabled?(Bonfire.Me.Users) do 48 | with {:ok, org} <- 49 | Bonfire.Me.Users.by_id(id, current_user: current_user) do 50 | format(org) 51 | else 52 | _ -> 53 | nil 54 | end 55 | else 56 | error("organizations feature not implemented") 57 | %{} 58 | end 59 | end 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /lib/agent/people.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Agent.People do 3 | # alias ValueFlows.{Simulate} 4 | import Untangle 5 | import Bonfire.Common.Config, only: [repo: 0] 6 | 7 | def people(signed_in_user) do 8 | if Bonfire.Common.Extend.module_enabled?(Bonfire.Me.Users) do 9 | Bonfire.Me.Users.list(signed_in_user) |> format() 10 | else 11 | if Bonfire.Common.Extend.module_enabled?(CommonsPub.Users) do 12 | {:ok, users} = CommonsPub.Users.many([:default, user: signed_in_user]) 13 | format(users) 14 | else 15 | error("people feature not implemented") 16 | [] 17 | end 18 | end 19 | end 20 | 21 | defp format(people) when is_list(people), 22 | do: 23 | people 24 | |> repo().maybe_preload(:shared_user, label: __MODULE__) 25 | |> Enum.map(&format/1) 26 | |> Enum.reject(fn 27 | %{agent_type: :organization} -> true 28 | _ -> false 29 | end) 30 | 31 | defp format(person) do 32 | ValueFlows.Agent.Agents.character_to_agent(person) 33 | end 34 | 35 | def person(id, current_user) when is_binary(id) do 36 | if Bonfire.Common.Extend.module_enabled?(Bonfire.Me.Users) do 37 | with {:ok, person} <- 38 | Bonfire.Me.Users.by_id(id, current_user: current_user) do 39 | format(person) 40 | else 41 | _ -> 42 | nil 43 | end 44 | else 45 | if Bonfire.Common.Extend.module_enabled?(CommonsPub.Users) do 46 | with {:ok, person} <- 47 | CommonsPub.Users.one([ 48 | :default, 49 | :geolocation, 50 | id: id, 51 | user: current_user 52 | ]) do 53 | format(person) 54 | else 55 | _ -> 56 | nil 57 | end 58 | else 59 | error("people feature not implemented") 60 | nil 61 | end 62 | end 63 | end 64 | 65 | def person(_, _), do: nil 66 | end 67 | -------------------------------------------------------------------------------- /lib/agreement/agreement.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Agreement do 2 | end 3 | -------------------------------------------------------------------------------- /lib/agreement/graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled do 3 | defmodule ValueFlows.Agreement.GraphQL do 4 | import Untangle 5 | 6 | # use Absinthe.Schema.Notation 7 | # import_sdl path: "lib/value_flows/graphql/schemas/agreement.gql" 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/all_migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.AllMigrations do 2 | @moduledoc false 3 | @moduledoc """ 4 | Catch-all migrations intended to be used to initialise new Bonfire apps. 5 | Add any new up/down ecto migrations in VF modules to the bottom of these two functions. 
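For example (a minimal sketch, assuming a host app with a standard Ecto repo; the module and file names are hypothetical), an app-level migration can simply delegate to these two functions:

    defmodule MyApp.Repo.Migrations.ImportValueflows do
      use Ecto.Migration

      def up, do: ValueFlows.AllMigrations.up()
      def down, do: ValueFlows.AllMigrations.down()
    end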
6 | """ 7 | def up do 8 | ValueFlows.Planning.Intent.Migrations.up() 9 | 10 | ValueFlows.Proposal.Migrations.up() 11 | 12 | ValueFlows.Knowledge.ResourceSpecification.Migrations.up() 13 | ValueFlows.Knowledge.ProcessSpecification.Migrations.up() 14 | 15 | ValueFlows.ValueCalculation.Migrations.up() 16 | 17 | ValueFlows.EconomicResource.Migrations.up() 18 | ValueFlows.Process.Migrations.up() 19 | ValueFlows.EconomicEvent.Migrations.up() 20 | 21 | ValueFlows.Planning.Intent.Migrations.add_references() 22 | 23 | ValueFlows.Claim.Migrations.up() 24 | 25 | ValueFlows.Planning.Commitment.Migrations.up() 26 | ValueFlows.Planning.Satisfaction.Migrations.up() 27 | end 28 | 29 | def down do 30 | ValueFlows.Planning.Intent.Migrations.down() 31 | 32 | ValueFlows.Proposal.Migrations.down() 33 | 34 | ValueFlows.Knowledge.ResourceSpecification.Migrations.down() 35 | ValueFlows.Knowledge.ProcessSpecification.Migrations.down() 36 | 37 | ValueFlows.EconomicResource.Migrations.down() 38 | ValueFlows.Process.Migrations.down() 39 | ValueFlows.EconomicEvent.Migrations.down() 40 | 41 | ValueFlows.ValueCalculation.Migrations.down() 42 | 43 | ValueFlows.Claim.Migrations.down() 44 | ValueFlows.Planning.Commitment.Migrations.down() 45 | ValueFlows.Planning.Satisfaction.Migrations.down() 46 | end 47 | end 48 | -------------------------------------------------------------------------------- /lib/appreciation/graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled do 3 | defmodule ValueFlows.Appreciation.GraphQL do 4 | import Untangle 5 | 6 | # use Absinthe.Schema.Notation 7 | # import_sdl path: "lib/value_flows/graphql/schemas/appreciation.gql" 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/claim/claim.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Claim do 3 | use Needle.Pointable, 4 | otp_app: :bonfire_valueflows, 5 | source: "vf_claim", 6 | table_id: "40MM0NSPVBVA1VEF10WSC1A1MS" 7 | 8 | import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 9 | 10 | alias Ecto.Changeset 11 | 12 | alias Bonfire.Quantify.Measure 13 | 14 | alias ValueFlows.Knowledge.Action 15 | alias ValueFlows.Knowledge.ResourceSpecification 16 | alias ValueFlows.EconomicEvent 17 | 18 | # @type t :: %__MODULE__{} 19 | 20 | pointable_schema do 21 | field(:note, :string) 22 | field(:agreed_in, :string) 23 | field(:finished, :boolean) 24 | field(:created, :utc_datetime_usec) 25 | field(:due, :utc_datetime_usec) 26 | field(:resource_classified_as, {:array, :string}, virtual: true) 27 | 28 | belongs_to(:action, Action, type: :string) 29 | belongs_to(:provider, ValueFlows.Util.user_or_org_schema()) 30 | belongs_to(:receiver, ValueFlows.Util.user_or_org_schema()) 31 | belongs_to(:resource_quantity, Measure, on_replace: :nilify) 32 | belongs_to(:effort_quantity, Measure, on_replace: :nilify) 33 | 34 | belongs_to(:resource_conforms_to, ResourceSpecification) 35 | belongs_to(:triggered_by, EconomicEvent) 36 | 37 | # a.k.a. 
in_scope_of 38 | belongs_to(:context, Needle.Pointer) 39 | 40 | # not defined in spec, used internally 41 | belongs_to(:creator, ValueFlows.Util.user_schema()) 42 | field(:is_public, :boolean, virtual: true) 43 | field(:published_at, :utc_datetime_usec) 44 | field(:is_disabled, :boolean, virtual: true, default: false) 45 | field(:disabled_at, :utc_datetime_usec) 46 | field(:deleted_at, :utc_datetime_usec) 47 | 48 | timestamps(inserted_at: false) 49 | end 50 | 51 | @required ~w(action_id)a 52 | @cast @required ++ 53 | ~w(note finished agreed_in created due resource_classified_as is_disabled)a ++ 54 | ~w(context_id resource_conforms_to_id triggered_by_id)a 55 | 56 | def create_changeset( 57 | %{} = creator, 58 | %{id: _} = provider, 59 | %{id: _} = receiver, 60 | attrs 61 | ) do 62 | create_changeset(creator, attrs) 63 | |> Changeset.change( 64 | provider_id: provider.id, 65 | receiver_id: receiver.id 66 | ) 67 | end 68 | 69 | def create_changeset(%{} = creator, attrs) do 70 | %__MODULE__{} 71 | |> Changeset.cast(attrs, @cast) 72 | |> Changeset.change( 73 | creator_id: creator.id, 74 | is_public: true 75 | ) 76 | |> common_changeset(attrs) 77 | end 78 | 79 | def update_changeset(%__MODULE__{} = claim, attrs) do 80 | claim 81 | |> Changeset.cast(attrs, @cast) 82 | |> common_changeset(attrs) 83 | end 84 | 85 | def validate_required(changeset) do 86 | Changeset.validate_required(changeset, @required) 87 | end 88 | 89 | defp common_changeset(changeset, attrs) do 90 | changeset 91 | |> ValueFlows.Util.change_measures(attrs, measure_fields()) 92 | |> change_public() 93 | |> change_disabled() 94 | end 95 | 96 | def measure_fields, do: [:resource_quantity, :effort_quantity] 97 | end 98 | -------------------------------------------------------------------------------- /lib/claim/claim_queries.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Claim.Queries do 3 | import Bonfire.Common.Repo.Utils, only: [match_admin: 0] 4 | import Ecto.Query 5 | import Untangle 6 | 7 | alias ValueFlows.Claim 8 | 9 | def query(Claim) do 10 | from(c in Claim, as: :claim) 11 | end 12 | 13 | def query(filters), do: query(Claim, filters) 14 | 15 | def query(q, filters), do: filter(query(q), filters) 16 | 17 | def queries(query, _page_opts, base_filters, data_filters, count_filters) do 18 | base_q = query(query, base_filters) 19 | data_q = filter(base_q, data_filters) 20 | count_q = filter(base_q, count_filters) 21 | {data_q, count_q} 22 | end 23 | 24 | def join_to(q, spec, join_qualifier \\ :left) 25 | 26 | def join_to(q, specs, jq) when is_list(specs) do 27 | Enum.reduce(specs, q, &join_to(&2, &1, jq)) 28 | end 29 | 30 | def join_to(q, :context, jq) do 31 | join(q, jq, [claim: c], c2 in assoc(c, :context), as: :context) 32 | end 33 | 34 | def filter(q, filters) when is_list(filters) do 35 | Enum.reduce(filters, q, &filter(&2, &1)) 36 | end 37 | 38 | def filter(q, {:join, {join, qual}}), do: join_to(q, join, qual) 39 | def filter(q, {:join, join}), do: join_to(q, join) 40 | 41 | ## by status 42 | 43 | def filter(q, :default) do 44 | filter(q, [:deleted]) 45 | end 46 | 47 | def filter(q, :deleted) do 48 | where(q, [claim: c], is_nil(c.deleted_at)) 49 | end 50 | 51 | def filter(q, :disabled) do 52 | where(q, [claim: c], is_nil(c.disabled_at)) 53 | end 54 | 55 | def filter(q, :private) do 56 | where(q, [claim: c], not is_nil(c.published_at)) 57 | end 58 | 59 | ## by user 60 | 61 | def filter(q, {:creator, match_admin()}), 
do: q 62 | 63 | def filter(q, {:creator, nil}) do 64 | filter(q, ~w(disabled private)a) 65 | end 66 | 67 | def filter(q, {:creator, %{id: user_id}}) do 68 | q 69 | |> where([claim: c], not is_nil(c.published_at) or c.creator_id == ^user_id) 70 | |> filter(~w(disabled)a) 71 | end 72 | 73 | ## by field values 74 | 75 | def filter(q, {:id, id}) when is_binary(id) do 76 | where(q, [claim: c], c.id == ^id) 77 | end 78 | 79 | def filter(q, {:id, ids}) when is_list(ids) do 80 | where(q, [claim: c], c.id in ^ids) 81 | end 82 | 83 | def filter(q, {:provider_id, id}) when is_binary(id) do 84 | where(q, [claim: c], c.provider_id == ^id) 85 | end 86 | 87 | def filter(q, {:provider_id, ids}) when is_list(ids) do 88 | where(q, [claim: c], c.provider_id in ^ids) 89 | end 90 | 91 | def filter(q, {:receiver_id, id}) when is_binary(id) do 92 | where(q, [claim: c], c.receiver_id == ^id) 93 | end 94 | 95 | def filter(q, {:receiver_id, ids}) when is_list(ids) do 96 | where(q, [claim: c], c.receiver_id in ^ids) 97 | end 98 | 99 | def filter(q, {:context_id, id}) when is_binary(id) do 100 | where(q, [claim: c], c.context_id == ^id) 101 | end 102 | 103 | def filter(q, {:context_id, ids}) when is_list(ids) do 104 | where(q, [claim: c], c.context_id in ^ids) 105 | end 106 | 107 | def filter(q, {:action_id, ids}) when is_list(ids) do 108 | where(q, [claim: c], c.action_id in ^ids) 109 | end 110 | 111 | def filter(q, {:action_id, id}) when is_binary(id) do 112 | where(q, [claim: c], c.action_id == ^id) 113 | end 114 | 115 | ## preloading 116 | 117 | def filter(q, {:preload, :all}) do 118 | preload(q, [ 119 | :creator, 120 | :provider, 121 | :receiver, 122 | :resource_conforms_to, 123 | :resource_quantity, 124 | :effort_quantity, 125 | :context, 126 | :triggered_by 127 | ]) 128 | end 129 | 130 | def filter(q, other_filter), 131 | do: ValueFlows.Util.common_filters(q, other_filter) 132 | end 133 | -------------------------------------------------------------------------------- /lib/claim/claims.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Claim.Claims do 3 | use Bonfire.Common.Utils, 4 | only: [maybe: 2] 5 | 6 | import Bonfire.Common.Config, only: [repo: 0] 7 | 8 | alias ValueFlows.Claim 9 | alias ValueFlows.Claim.Queries 10 | 11 | alias Bonfire.Common.Needles 12 | 13 | @behaviour Bonfire.Federate.ActivityPub.FederationModules 14 | def federation_module, do: ["ValueFlows:Claim", "Claim"] 15 | 16 | def one(filters), do: repo().single(Queries.query(Claim, filters)) 17 | 18 | def many(filters \\ []), do: {:ok, repo().many(Queries.query(Claim, filters))} 19 | 20 | def preload_all(%Claim{} = claim) do 21 | # shouldn't fail 22 | {:ok, claim} = one(id: claim.id, preload: :all) 23 | claim 24 | end 25 | 26 | def create( 27 | %{} = creator, 28 | %{id: _} = provider, 29 | %{id: _} = receiver, 30 | %{} = attrs 31 | ) do 32 | repo().transact_with(fn -> 33 | attrs = prepare_attrs(attrs) 34 | 35 | with {:ok, provider_ptr} <- 36 | Needles.one(id: provider.id, skip_boundary_check: true), 37 | {:ok, receiver_ptr} <- 38 | Needles.one(id: receiver.id, skip_boundary_check: true) do 39 | Claim.create_changeset(creator, provider_ptr, receiver_ptr, attrs) 40 | |> Claim.validate_required() 41 | |> repo().insert() 42 | |> Errors.maybe_ok_error(&preload_all/1) 43 | end 44 | end) 45 | end 46 | 47 | def create(%{} = creator, %{} = attrs) do 48 | repo().transact_with(fn -> 49 | attrs = prepare_attrs(attrs) 50 | 51 | with {:ok, claim} <- 52 
| Claim.create_changeset(creator, attrs) 53 | |> Claim.validate_required() 54 | |> repo().insert() do 55 | preload_all(claim) 56 | end 57 | end) 58 | end 59 | 60 | def update(%Claim{} = claim, %{} = attrs) do 61 | repo().transact_with(fn -> 62 | attrs = prepare_attrs(attrs) 63 | 64 | claim 65 | |> Claim.update_changeset(attrs) 66 | |> repo().update() 67 | |> Errors.maybe_ok_error(&preload_all/1) 68 | end) 69 | end 70 | 71 | def soft_delete(%Claim{} = claim) do 72 | Bonfire.Common.Repo.Delete.soft_delete(claim) 73 | end 74 | 75 | defp prepare_attrs(attrs) do 76 | attrs 77 | |> Enums.maybe_put( 78 | :action_id, 79 | Enums.attr_get_id(attrs, :action) |> ValueFlows.Knowledge.Action.Actions.id() 80 | ) 81 | |> Enums.maybe_put( 82 | :context_id, 83 | attrs |> Map.get(:in_scope_of) |> maybe(&List.first/1) 84 | ) 85 | |> Enums.maybe_put( 86 | :resource_conforms_to_id, 87 | Enums.attr_get_id(attrs, :resource_conforms_to) 88 | ) 89 | |> Enums.maybe_put(:triggered_by_id, Enums.attr_get_id(attrs, :triggered_by)) 90 | end 91 | 92 | def ap_publish_activity(subject, activity_name, thing) do 93 | ValueFlows.Util.Federation.ap_publish_activity( 94 | subject, 95 | activity_name, 96 | :claim, 97 | thing, 98 | 3, 99 | [] 100 | ) 101 | end 102 | 103 | def ap_receive_activity(creator, activity, object) do 104 | ValueFlows.Util.Federation.ap_receive_activity( 105 | creator, 106 | activity, 107 | object, 108 | &create/2 109 | ) 110 | end 111 | end 112 | -------------------------------------------------------------------------------- /lib/claim/graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Code.ensure_loaded?(Bonfire.API.GraphQL) do 3 | defmodule ValueFlows.Claim.GraphQL do 4 | import Untangle 5 | 6 | import Bonfire.Common.Config, only: [repo: 0] 7 | 8 | alias Bonfire.Common.Needles 9 | alias Bonfire.API.GraphQL 10 | alias Bonfire.API.GraphQL.FetchPage 11 | alias Bonfire.API.GraphQL.ResolveField 12 | alias Bonfire.API.GraphQL.ResolveRootPage 13 | 14 | alias ValueFlows.Claim.Claims 15 | 16 | def claim(%{id: id}, info) do 17 | ResolveField.run(%ResolveField{ 18 | module: __MODULE__, 19 | fetcher: :fetch_claim, 20 | context: id, 21 | info: info 22 | }) 23 | end 24 | 25 | def claims(page_opts, info) do 26 | ResolveRootPage.run(%ResolveRootPage{ 27 | module: __MODULE__, 28 | fetcher: :fetch_claims, 29 | page_opts: page_opts, 30 | info: info, 31 | cursor_validators: [ 32 | &(is_integer(&1) and &1 >= 0), 33 | &Needle.UID.cast/1 34 | ] 35 | }) 36 | end 37 | 38 | def fetch_claim(_info, id) do 39 | Claims.one([:default, id: id]) 40 | end 41 | 42 | def fetch_events(page_opts, info) do 43 | FetchPage.run(%FetchPage{ 44 | queries: ValueFlows.Claim.Queries, 45 | query: ValueFlows.Claim, 46 | page_opts: page_opts, 47 | base_filters: [ 48 | :default, 49 | creator: GraphQL.current_user(info) 50 | ] 51 | }) 52 | end 53 | 54 | def fetch_triggered_by_edge(%{triggered_by_id: id} = thing, _, _) 55 | when is_binary(id) do 56 | thing = repo().preload(thing, :triggered_by) 57 | {:ok, Map.get(thing, :triggered_by)} 58 | end 59 | 60 | def fetch_triggered_by_edge(_, _, _) do 61 | {:ok, nil} 62 | end 63 | 64 | def create_claim( 65 | %{claim: %{provider: provider_id, receiver: receiver_id} = attrs}, 66 | info 67 | ) do 68 | with {:ok, user} <- GraphQL.current_user_or_not_logged_in(info), 69 | {:ok, provider} <- 70 | Needles.one(id: provider_id, skip_boundary_check: true), 71 | {:ok, receiver} <- 72 | Needles.one(id: receiver_id, 
skip_boundary_check: true), 73 | {:ok, claim} <- Claims.create(user, provider, receiver, attrs) do 74 | {:ok, %{claim: claim}} 75 | end 76 | end 77 | 78 | def update_claim(%{claim: %{id: id} = attrs}, info) do 79 | with :ok <- GraphQL.is_authenticated(info), 80 | {:ok, claim} <- claim(%{id: id}, info), 81 | {:ok, claim} <- Claims.update(claim, attrs) do 82 | {:ok, %{claim: claim}} 83 | end 84 | end 85 | 86 | def delete_claim(%{id: id}, info) do 87 | with :ok <- GraphQL.is_authenticated(info), 88 | {:ok, claim} <- claim(%{id: id}, info), 89 | {:ok, _} <- Claims.soft_delete(claim) do 90 | {:ok, true} 91 | end 92 | end 93 | end 94 | end 95 | -------------------------------------------------------------------------------- /lib/claim/migrations.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Claim.Migrations do 3 | @moduledoc false 4 | use Ecto.Migration 5 | 6 | import Needle.Migration 7 | 8 | alias ValueFlows.Knowledge.ResourceSpecification 9 | alias ValueFlows.EconomicEvent 10 | 11 | def up do 12 | create_pointable_table(ValueFlows.Claim) do 13 | add(:note, :text) 14 | add(:agreed_in, :text) 15 | add(:action_id, :string) 16 | 17 | add(:finished, :boolean) 18 | add(:created, :timestamptz) 19 | add(:due, :timestamptz) 20 | 21 | add(:provider_id, weak_pointer(), null: true) 22 | add(:receiver_id, weak_pointer(), null: true) 23 | 24 | add(:resource_conforms_to_id, weak_pointer(ResourceSpecification), null: true) 25 | 26 | add(:triggered_by_id, weak_pointer(EconomicEvent), null: true) 27 | 28 | add(:resource_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 29 | 30 | add(:effort_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 31 | 32 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 33 | add(:context_id, weak_pointer(), null: true) 34 | 35 | add(:published_at, :timestamptz) 36 | add(:deleted_at, :timestamptz) 37 | add(:disabled_at, :timestamptz) 38 | 39 | timestamps(inserted_at: false, type: :utc_datetime_usec) 40 | end 41 | end 42 | 43 | def down do 44 | drop_pointable_table(ValueFlows.Claim) 45 | end 46 | end 47 | -------------------------------------------------------------------------------- /lib/economic_event/event.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EconomicEvent do 2 | use Needle.Pointable, 3 | otp_app: :bonfire_valueflows, 4 | source: "vf_event", 5 | table_id: "2CTVA10BSERVEDF10WS0FVA1VE" 6 | 7 | import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 8 | 9 | alias Ecto.Changeset 10 | 11 | alias ValueFlows.Knowledge.Action 12 | alias ValueFlows.Knowledge.ResourceSpecification 13 | alias ValueFlows.EconomicEvent 14 | alias ValueFlows.EconomicResource 15 | alias ValueFlows.Process 16 | alias ValueFlows.ValueCalculation 17 | 18 | alias Bonfire.Quantify.Measure 19 | 20 | # @type t :: %__MODULE__{} 21 | 22 | pointable_schema do 23 | field(:note, :string) 24 | 25 | # TODO: link to Agreement? 
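# (in the ValueFlows vocabulary, agreedIn is a URI reference to the agreement governing this flow)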
26 | field(:agreed_in, :string) 27 | 28 | field(:has_beginning, :utc_datetime_usec) 29 | field(:has_end, :utc_datetime_usec) 30 | field(:has_point_in_time, :utc_datetime_usec) 31 | 32 | belongs_to(:action, Action, type: :string) 33 | 34 | belongs_to(:input_of, Process) 35 | belongs_to(:output_of, Process) 36 | 37 | belongs_to(:provider, ValueFlows.Util.user_or_org_schema()) 38 | belongs_to(:receiver, ValueFlows.Util.user_or_org_schema()) 39 | 40 | belongs_to(:resource_inventoried_as, EconomicResource) 41 | belongs_to(:to_resource_inventoried_as, EconomicResource) 42 | 43 | field(:resource_classified_as, {:array, :string}, virtual: true) 44 | 45 | belongs_to(:resource_conforms_to, ResourceSpecification) 46 | 47 | belongs_to(:resource_quantity, Measure, on_replace: :nilify) 48 | belongs_to(:effort_quantity, Measure, on_replace: :nilify) 49 | 50 | belongs_to(:context, Needle.Pointer) 51 | 52 | belongs_to(:at_location, Bonfire.Geolocate.Geolocation) 53 | 54 | belongs_to(:triggered_by, EconomicEvent) 55 | 56 | belongs_to(:calculated_using, ValueCalculation) 57 | 58 | # TODO: 59 | # track: [ProductionFlowItem!] 60 | # trace: [ProductionFlowItem!] 61 | # realizationOf: Agreement 62 | # appreciationOf: [Appreciation!] 63 | # appreciatedBy: [Appreciation!] 64 | # fulfills: [Fulfillment!] 65 | # satisfies: [Satisfaction!] 66 | # field(:deletable, :boolean) # TODO - virtual field? how is it calculated? 67 | 68 | belongs_to(:creator, ValueFlows.Util.user_schema()) 69 | 70 | field(:is_public, :boolean, virtual: true) 71 | field(:published_at, :utc_datetime_usec) 72 | field(:is_disabled, :boolean, virtual: true, default: false) 73 | field(:disabled_at, :utc_datetime_usec) 74 | field(:deleted_at, :utc_datetime_usec) 75 | 76 | timestamps(inserted_at: false) 77 | end 78 | 79 | @required ~w(action_id provider_id receiver_id is_public)a 80 | @cast @required ++ 81 | ~w(note resource_classified_as agreed_in has_beginning has_end has_point_in_time is_disabled)a ++ 82 | ~w(input_of_id output_of_id resource_conforms_to_id resource_inventoried_as_id to_resource_inventoried_as_id)a ++ 83 | ~w(triggered_by_id at_location_id context_id calculated_using_id)a 84 | 85 | def create_changeset( 86 | %{} = creator, 87 | attrs 88 | ) do 89 | validate_changeset(attrs) 90 | |> Changeset.change(creator_id: creator.id) 91 | end 92 | 93 | def validate_changeset(attrs \\ %{}) do 94 | %__MODULE__{} 95 | |> Changeset.cast(attrs, @cast) 96 | |> ValueFlows.Util.change_measures(attrs, measure_fields()) 97 | |> validate_create_changeset() 98 | end 99 | 100 | def validate_create_changeset(cs) do 101 | cs 102 | |> Changeset.change(is_public: true) 103 | |> Changeset.validate_required(@required) 104 | |> common_changeset() 105 | end 106 | 107 | def update_changeset(%EconomicEvent{} = event, attrs) do 108 | event 109 | |> Changeset.cast(attrs, @cast) 110 | |> ValueFlows.Util.change_measures(attrs, measure_fields()) 111 | |> common_changeset() 112 | end 113 | 114 | def measure_fields do 115 | [:resource_quantity, :effort_quantity] 116 | end 117 | 118 | defp common_changeset(changeset) do 119 | changeset 120 | |> Changeset.change(is_public: true) 121 | |> change_public() 122 | |> change_disabled() 123 | |> Changeset.foreign_key_constraint( 124 | :resource_inventoried_as_id, 125 | name: :vf_event_resource_inventoried_as_id_fkey 126 | ) 127 | |> Changeset.foreign_key_constraint( 128 | :to_resource_inventoried_as_id, 129 | name: :vf_event_to_resource_inventoried_as_id_fkey 130 | ) 131 | end 132 | 133 | @behaviour Bonfire.Common.SchemaModule 134 
| def context_module, do: ValueFlows.EconomicEvent.EconomicEvents 135 | def query_module, do: ValueFlows.EconomicEvent.Queries 136 | 137 | def follow_filters, do: [:default] 138 | end 139 | -------------------------------------------------------------------------------- /lib/economic_event/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EconomicEvent.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | # alias Needle.ULID 5 | import Needle.Migration 6 | 7 | alias ValueFlows.Knowledge.ResourceSpecification 8 | alias ValueFlows.EconomicEvent 9 | alias ValueFlows.EconomicResource 10 | alias ValueFlows.Process 11 | 12 | # defp event_table(), do: EconomicEvent.__schema__(:source) 13 | 14 | def up do 15 | create_pointable_table(ValueFlows.EconomicEvent) do 16 | # add(:name, :string) 17 | add(:note, :text) 18 | 19 | # add(:image_id, weak_pointer(ValueFlows.Util.image_schema()), null: true) 20 | 21 | add(:action_id, :string) 22 | 23 | add(:input_of_id, weak_pointer(Process), null: true) 24 | add(:output_of_id, weak_pointer(Process), null: true) 25 | 26 | add(:provider_id, weak_pointer(), null: true) 27 | add(:receiver_id, weak_pointer(), null: true) 28 | 29 | add(:resource_inventoried_as_id, weak_pointer(EconomicResource), null: true) 30 | 31 | add(:to_resource_inventoried_as_id, weak_pointer(EconomicResource), null: true) 32 | 33 | # add(:resource_classified_as, {:array, :string}, virtual: true) 34 | 35 | add(:resource_conforms_to_id, weak_pointer(ResourceSpecification), null: true) 36 | 37 | add(:resource_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 38 | 39 | add(:effort_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 40 | 41 | add(:has_beginning, :timestamptz) 42 | add(:has_end, :timestamptz) 43 | add(:has_point_in_time, :timestamptz) 44 | 45 | # optional context as in_scope_of 46 | add(:context_id, weak_pointer(), null: true) 47 | 48 | # TODO: use string or link to Agreement? 
49 | add(:agreed_in, :string) 50 | # belongs_to(:agreed_in, Agreement) 51 | 52 | add(:at_location_id, weak_pointer(Bonfire.Geolocate.Geolocation), null: true) 53 | 54 | add(:triggered_by_id, weak_pointer(EconomicEvent), null: true) 55 | 56 | add(:calculated_using_id, weak_pointer(ValueFlows.ValueCalculation), null: true) 57 | 58 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 59 | 60 | add(:published_at, :timestamptz) 61 | add(:deleted_at, :timestamptz) 62 | add(:disabled_at, :timestamptz) 63 | 64 | timestamps(inserted_at: false, type: :utc_datetime_usec) 65 | end 66 | end 67 | 68 | def down do 69 | drop_pointable_table(ValueFlows.EconomicEvent) 70 | end 71 | end 72 | -------------------------------------------------------------------------------- /lib/economic_resource/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EconomicResource.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | # alias Needle.ULID 5 | import Needle.Migration 6 | 7 | alias ValueFlows.Knowledge.ResourceSpecification 8 | alias ValueFlows.Knowledge.ProcessSpecification 9 | alias ValueFlows.EconomicResource 10 | # alias ValueFlows.EconomicEvent 11 | # alias ValueFlows.Process 12 | 13 | # defp resource_table(), do: EconomicResource.__schema__(:source) 14 | 15 | def up do 16 | create_pointable_table(ValueFlows.EconomicResource) do 17 | add(:name, :string) 18 | add(:note, :text) 19 | add(:tracking_identifier, :text) 20 | 21 | add(:image_id, weak_pointer(ValueFlows.Util.image_schema()), null: true) 22 | 23 | add(:conforms_to_id, weak_pointer(ResourceSpecification), null: true) 24 | 25 | # add(:resource_classified_as, {:array, :string}, virtual: true) 26 | 27 | add(:current_location_id, weak_pointer(Bonfire.Geolocate.Geolocation), null: true) 28 | 29 | add(:contained_in_id, weak_pointer(EconomicResource), null: true) 30 | 31 | add(:state_id, :string) 32 | 33 | # usually linked to Agent 34 | add(:primary_accountable_id, weak_pointer(), null: true) 35 | 36 | add(:accounting_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 37 | 38 | add(:onhand_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 39 | 40 | add(:unit_of_effort_id, weak_pointer(Bonfire.Quantify.Unit), null: true) 41 | 42 | add(:stage_id, weak_pointer(ProcessSpecification), null: true) 43 | 44 | # optional context as in_scope_of 45 | add(:context_id, weak_pointer(), null: true) 46 | 47 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 48 | 49 | add(:published_at, :timestamptz) 50 | add(:deleted_at, :timestamptz) 51 | add(:disabled_at, :timestamptz) 52 | 53 | timestamps(inserted_at: false, type: :utc_datetime_usec) 54 | end 55 | end 56 | 57 | def down do 58 | drop_pointable_table(ValueFlows.EconomicResource) 59 | end 60 | end 61 | -------------------------------------------------------------------------------- /lib/economic_resource/resource.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EconomicResource do 2 | use Needle.Pointable, 3 | otp_app: :bonfire_valueflows, 4 | source: "vf_resource", 5 | table_id: "2N0BSERVEDANDVSEFV1RES0VRC" 6 | 7 | import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 8 | alias Ecto.Changeset 9 | 10 | alias Bonfire.Quantify.Measure 11 | alias Bonfire.Quantify.Unit 12 | 13 | alias ValueFlows.Knowledge.Action 14 | alias ValueFlows.Knowledge.ResourceSpecification 15 | # alias 
ValueFlows.Knowledge.ProcessSpecification 16 | 17 | alias ValueFlows.EconomicResource 18 | alias Bonfire.Common 19 | alias Common.Types 20 | 21 | # @type t :: %__MODULE__{} 22 | 23 | pointable_schema do 24 | field(:name, :string) 25 | field(:note, :string) 26 | field(:tracking_identifier, :string) 27 | 28 | belongs_to(:image, Bonfire.Files.Media) 29 | 30 | field(:classified_as, {:array, :string}, virtual: true) 31 | 32 | belongs_to(:conforms_to, ResourceSpecification) 33 | 34 | belongs_to(:current_location, Bonfire.Geolocate.Geolocation) 35 | 36 | belongs_to(:contained_in, EconomicResource) 37 | 38 | belongs_to(:state, Action, type: :string) 39 | 40 | belongs_to(:primary_accountable, ValueFlows.Util.user_or_org_schema()) 41 | 42 | belongs_to(:accounting_quantity, Measure, on_replace: :nilify) 43 | belongs_to(:onhand_quantity, Measure, on_replace: :nilify) 44 | 45 | belongs_to(:unit_of_effort, Unit, on_replace: :nilify) 46 | 47 | # has_many(:inputs, EconomicEvent, foreign_key: :resource_inventoried_as_id, references: :id) 48 | # has_many(:outputs, EconomicEvent, foreign_key: :to_resource_inventoried_as_id, references: :id) 49 | 50 | # TODO relations: 51 | # lot: ProductBatch 52 | # belongs_to(:stage, ProcessSpecification) 53 | # field(:deletable, :boolean) # TODO - virtual field? how is it calculated? 54 | 55 | belongs_to(:creator, ValueFlows.Util.user_schema()) 56 | 57 | field(:is_public, :boolean, virtual: true) 58 | field(:published_at, :utc_datetime_usec) 59 | field(:is_disabled, :boolean, virtual: true, default: false) 60 | field(:disabled_at, :utc_datetime_usec) 61 | field(:deleted_at, :utc_datetime_usec) 62 | 63 | timestamps(inserted_at: false) 64 | end 65 | 66 | @required ~w(name is_public)a 67 | @cast @required ++ 68 | ~w(note tracking_identifier current_location_id is_disabled image_id)a ++ 69 | ~w(primary_accountable_id state_id contained_in_id unit_of_effort_id conforms_to_id current_location_id)a 70 | 71 | def create_changeset( 72 | creator, 73 | attrs 74 | ) do 75 | %EconomicResource{} 76 | |> Changeset.cast(attrs, @cast) 77 | |> Changeset.change( 78 | creator_id: Bonfire.Common.Types.uid(creator), 79 | is_public: true 80 | ) 81 | |> Changeset.validate_required(@required) 82 | |> common_changeset(attrs) 83 | end 84 | 85 | def update_changeset(%EconomicResource{} = resource, attrs) do 86 | resource 87 | |> Changeset.cast(attrs, @cast) 88 | |> common_changeset(attrs) 89 | end 90 | 91 | def measure_fields do 92 | [:onhand_quantity, :accounting_quantity] 93 | end 94 | 95 | defp common_changeset(changeset, attrs) do 96 | changeset 97 | |> ValueFlows.Util.change_measures(attrs, measure_fields()) 98 | |> change_public() 99 | |> change_disabled() 100 | end 101 | 102 | @behaviour Bonfire.Common.SchemaModule 103 | def context_module, do: ValueFlows.EconomicResource.EconomicResources 104 | def query_module, do: ValueFlows.EconomicResource.Queries 105 | 106 | def follow_filters, do: [:default] 107 | end 108 | -------------------------------------------------------------------------------- /lib/knowledge/action/action.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.Action do 2 | use Ecto.Schema 3 | 4 | # import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 5 | 6 | # import Ecto.Enum 7 | 8 | # alias Ecto.Changeset 9 | # alias ValueFlows.Knowledge.Action 10 | 11 | # defenum label_enum, work: 0, produce: 1, consume: 2, use: 3, consume: 4, transfer: 5 12 | 13 | # @type t :: %__MODULE__{} 14 | 15 | 
@primary_key {:id, :string, autogenerate: false} 16 | embedded_schema do 17 | # A unique verb which defines the action. 18 | field(:label, :string) 19 | 20 | # Denotes if a process input or output, or not related to a process. 21 | field(:input_output, :string) 22 | 23 | # enum: "input", "output", "notApplicable" 24 | 25 | # The action that should be included on the other direction of the process, for example accept with modify. 26 | field(:pairs_with, :string) 27 | 28 | # possible values: "notApplicable" (null), or any of the actions (foreign key) 29 | # TODO: do we want to do this as an actual Action (optional)? In the VF spec they are NamedIndividuals defined in the spec, including "notApplicable". 30 | 31 | # The effect of an economic event on a resource, increment, decrement, no effect, or decrement resource and increment 'to' resource 32 | field(:resource_effect, :string) 33 | 34 | # enum: "increment", "decrement", "noEffect", "decrementIncrement" 35 | 36 | field(:onhand_effect, :string) 37 | 38 | # description of the action (not part of VF) 39 | field(:note, :string) 40 | 41 | timestamps() 42 | end 43 | 44 | # @required ~w(label resource_effect)a 45 | # @cast @required ++ ~w(input_output pairs_with note)a 46 | 47 | # def create_changeset(attrs) do 48 | # %Action{} 49 | # |> Changeset.cast(attrs, @cast) 50 | # |> Changeset.validate_required(@required) 51 | # |> common_changeset() 52 | # end 53 | 54 | # def update_changeset(%Action{} = action, attrs) do 55 | # action 56 | # |> Changeset.cast(attrs, @cast) 57 | # |> common_changeset() 58 | # end 59 | 60 | # defp common_changeset(changeset) do 61 | # changeset 62 | # end 63 | end 64 | -------------------------------------------------------------------------------- /lib/knowledge/action/graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled do 3 | defmodule ValueFlows.Knowledge.Action.GraphQL do 4 | import Untangle 5 | 6 | # use Absinthe.Schema.Notation 7 | # import_sdl path: "lib/value_flows/graphql/schemas/knowledge.gql" 8 | 9 | def action(%{id: id}, _) do 10 | # {:ok, Simulate.action()} 11 | ValueFlows.Knowledge.Action.Actions.action(id) 12 | end 13 | 14 | def all_actions(_, _) do 15 | {:ok, ValueFlows.Knowledge.Action.Actions.actions_list()} 16 | # {:ok, long_list(&Simulate.action/0)} 17 | end 18 | 19 | def action_edge(%{action_id: id}, _, _) when not is_nil(id) do 20 | action(%{id: id}, nil) 21 | end 22 | 23 | def action_edge(_, _, _) do 24 | {:ok, nil} 25 | end 26 | end 27 | end 28 | -------------------------------------------------------------------------------- /lib/knowledge/process_specification/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.ProcessSpecification.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | # import Bonfire.Common.Config, only: [repo: 0] 5 | # alias Needle.ULID 6 | import Needle.Migration 7 | 8 | # alias ValueFlows.Knowledge.ProcessSpecification 9 | 10 | # defp resource_table(), do: EconomicResource.__schema__(:source) 11 | 12 | def up do 13 | create_pointable_table(ValueFlows.Knowledge.ProcessSpecification) do 14 | add(:name, :string) 15 | add(:note, :text) 16 | 17 | add(:image_id, weak_pointer(ValueFlows.Util.image_schema()), null: true) 18 | 19 | # add(:resource_classified_as, {:array, :string}, virtual: true) 20 | 21 | # optional context as 
in_scope_of 22 | add(:context_id, weak_pointer(), null: true) 23 | 24 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 25 | 26 | add(:published_at, :timestamptz) 27 | add(:deleted_at, :timestamptz) 28 | add(:disabled_at, :timestamptz) 29 | 30 | timestamps(inserted_at: false, type: :utc_datetime_usec) 31 | end 32 | end 33 | 34 | def down do 35 | drop_pointable_table(ValueFlows.Knowledge.ProcessSpecification) 36 | end 37 | end 38 | -------------------------------------------------------------------------------- /lib/knowledge/process_specification/process_spec.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.ProcessSpecification do 2 | use Needle.Pointable, 3 | otp_app: :bonfire_valueflows, 4 | source: "vf_process_spec", 5 | table_id: "2SPEC1F1CAT10NF0RPR0CESSES" 6 | 7 | import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 8 | 9 | alias Ecto.Changeset 10 | 11 | alias ValueFlows.Knowledge.ProcessSpecification 12 | 13 | # @type t :: %__MODULE__{} 14 | 15 | pointable_schema do 16 | field(:name, :string) 17 | field(:note, :string) 18 | 19 | field(:classified_as, {:array, :string}, virtual: true) 20 | 21 | belongs_to(:context, Needle.Pointer) 22 | 23 | belongs_to(:creator, ValueFlows.Util.user_schema()) 24 | 25 | field(:is_public, :boolean, virtual: true) 26 | field(:published_at, :utc_datetime_usec) 27 | field(:is_disabled, :boolean, virtual: true, default: false) 28 | field(:disabled_at, :utc_datetime_usec) 29 | field(:deleted_at, :utc_datetime_usec) 30 | 31 | timestamps(inserted_at: false) 32 | end 33 | 34 | @required ~w(name is_public)a 35 | @cast @required ++ ~w(note classified_as is_disabled context_id)a 36 | 37 | def create_changeset( 38 | %{} = creator, 39 | attrs 40 | ) do 41 | %ProcessSpecification{} 42 | |> Changeset.cast(attrs, @cast) 43 | |> Changeset.validate_required(@required) 44 | |> Changeset.change( 45 | creator_id: creator.id, 46 | is_public: true 47 | ) 48 | |> common_changeset() 49 | end 50 | 51 | def update_changeset(%ProcessSpecification{} = process_spec, attrs) do 52 | process_spec 53 | |> Changeset.cast(attrs, @cast) 54 | |> common_changeset() 55 | end 56 | 57 | defp common_changeset(changeset) do 58 | changeset 59 | |> change_public() 60 | |> change_disabled() 61 | end 62 | 63 | @behaviour Bonfire.Common.SchemaModule 64 | def context_module, 65 | do: ValueFlows.Knowledge.ProcessSpecification.ProcessSpecifications 66 | 67 | def query_module, do: ValueFlows.Knowledge.ProcessSpecification.Queries 68 | 69 | def follow_filters, do: [:default] 70 | end 71 | -------------------------------------------------------------------------------- /lib/knowledge/resource_specification/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.ResourceSpecification.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | # alias Needle.ULID 5 | import Needle.Migration 6 | 7 | # alias ValueFlows.Knowledge.ResourceSpecification 8 | # alias ValueFlows.EconomicResource 9 | # alias ValueFlows.EconomicEvent 10 | 11 | # defp resource_table(), do: EconomicResource.__schema__(:source) 12 | 13 | def up do 14 | create_pointable_table(ValueFlows.Knowledge.ResourceSpecification) do 15 | add(:name, :string) 16 | add(:note, :text) 17 | 18 | add(:image_id, weak_pointer(ValueFlows.Util.image_schema()), null: true) 19 | 20 | # add(:resource_classified_as, {:array, :string}, virtual: true) 21 | 22 |
add(:default_unit_of_effort_id, weak_pointer(Bonfire.Quantify.Unit), null: true) 23 | 24 | # optional context as in_scope_of 25 | add(:context_id, weak_pointer(), null: true) 26 | 27 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 28 | 29 | add(:published_at, :timestamptz) 30 | add(:deleted_at, :timestamptz) 31 | add(:disabled_at, :timestamptz) 32 | 33 | timestamps(inserted_at: false, type: :utc_datetime_usec) 34 | end 35 | end 36 | 37 | def down do 38 | drop_pointable_table(ValueFlows.Knowledge.ResourceSpecification) 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/knowledge/resource_specification/resource_spec.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.ResourceSpecification do 2 | use Needle.Pointable, 3 | otp_app: :bonfire_valueflows, 4 | source: "vf_resource_spec", 5 | table_id: "1PEC1F1CAT10NK1ND0FRES0VRC" 6 | 7 | import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 8 | use Bonfire.Common.Utils 9 | 10 | alias Ecto.Changeset 11 | 12 | # 13 | # alias ValueFlows.Knowledge.Action 14 | alias ValueFlows.Knowledge.ResourceSpecification 15 | alias Bonfire.Quantify.Unit 16 | 17 | # @type t :: %__MODULE__{} 18 | 19 | pointable_schema do 20 | field(:name, :string) 21 | field(:note, :string) 22 | 23 | belongs_to(:image, Bonfire.Files.Media) 24 | 25 | # array of URI 26 | field(:resource_classified_as, {:array, :string}, virtual: true) 27 | 28 | # TODO hook up unit to contexts/resolvers 29 | belongs_to(:default_unit_of_effort, Unit, on_replace: :nilify) 30 | 31 | belongs_to(:creator, ValueFlows.Util.user_schema()) 32 | belongs_to(:context, Needle.Pointer) 33 | 34 | field(:is_public, :boolean, virtual: true) 35 | field(:published_at, :utc_datetime_usec) 36 | 37 | field(:is_disabled, :boolean, virtual: true, default: false) 38 | field(:disabled_at, :utc_datetime_usec) 39 | 40 | field(:deleted_at, :utc_datetime_usec) 41 | 42 | has_many(:conforming_resources, ValueFlows.EconomicResource, foreign_key: :conforms_to_id) 43 | 44 | timestamps(inserted_at: false) 45 | end 46 | 47 | @required ~w(name is_public)a 48 | @cast @required ++ ~w(note is_disabled context_id image_id)a 49 | 50 | def create_changeset( 51 | creator, 52 | %{id: _} = context, 53 | attrs 54 | ) do 55 | create_changeset( 56 | creator, 57 | attrs 58 | ) 59 | |> Changeset.change(context_id: context.id) 60 | end 61 | 62 | def create_changeset( 63 | %{} = creator, 64 | attrs 65 | ) do 66 | create_changeset( 67 | nil, 68 | attrs 69 | ) 70 | |> Changeset.change(creator_id: creator.id) 71 | end 72 | 73 | def create_changeset( 74 | _, 75 | attrs 76 | ) do 77 | %ResourceSpecification{} 78 | |> Changeset.cast(attrs, @cast) 79 | |> Changeset.change( 80 | default_unit_of_effort_id: Enums.attr_get_id(attrs, :default_unit_of_effort), 81 | is_public: true 82 | ) 83 | |> Changeset.validate_required(@required) 84 | |> common_changeset() 85 | end 86 | 87 | def update_changeset( 88 | %ResourceSpecification{} = resource_spec, 89 | %{id: _} = context, 90 | attrs 91 | ) do 92 | resource_spec 93 | |> Changeset.cast(attrs, @cast) 94 | |> Changeset.change( 95 | context_id: context.id, 96 | default_unit_of_effort_id: Enums.attr_get_id(attrs, :default_unit_of_effort) 97 | ) 98 | |> common_changeset() 99 | end 100 | 101 | def update_changeset(%ResourceSpecification{} = resource_spec, attrs) do 102 | resource_spec 103 | |> Changeset.cast(attrs, @cast) 104 | |> Changeset.change( 105 
| default_unit_of_effort_id: Enums.attr_get_id(attrs, :default_unit_of_effort) 106 | ) 107 | |> common_changeset() 108 | end 109 | 110 | defp common_changeset(changeset) do 111 | changeset 112 | |> change_public() 113 | |> change_disabled() 114 | end 115 | 116 | @behaviour Bonfire.Common.SchemaModule 117 | def context_module, 118 | do: ValueFlows.Knowledge.ResourceSpecification.ResourceSpecifications 119 | 120 | def query_module, do: ValueFlows.Knowledge.ResourceSpecification.Queries 121 | 122 | def follow_filters, do: [:default] 123 | end 124 | -------------------------------------------------------------------------------- /lib/plan/graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled do 3 | defmodule ValueFlows.Plan.GraphQL do 4 | import Untangle 5 | 6 | # use Absinthe.Schema.Notation 7 | # import_sdl path: "lib/value_flows/graphql/schemas/plan.gql" 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/planning/commitment/commitment.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Commitment do 2 | use Needle.Pointable, 3 | otp_app: :bonfire, 4 | source: "vf_commitment", 5 | table_id: "40MM1TMENTED95D6694555B6E8" 6 | 7 | alias Ecto.Changeset 8 | 9 | alias Bonfire.Quantify.Measure 10 | alias Bonfire.Geolocate.Geolocation 11 | alias ValueFlows.Knowledge.Action 12 | alias ValueFlows.Knowledge.ResourceSpecification 13 | alias ValueFlows.EconomicResource 14 | alias ValueFlows.Process 15 | 16 | # @type t :: %__MODULE__{} 17 | 18 | pointable_schema do 19 | belongs_to(:action, Action, type: :string) 20 | 21 | belongs_to(:input_of, Process) 22 | belongs_to(:output_of, Process) 23 | 24 | belongs_to(:provider, ValueFlows.Util.user_or_org_schema()) 25 | belongs_to(:receiver, ValueFlows.Util.user_or_org_schema()) 26 | 27 | field(:resource_classified_as, {:array, :string}, virtual: true) 28 | belongs_to(:resource_conforms_to, ResourceSpecification) 29 | belongs_to(:resource_inventoried_as, EconomicResource) 30 | 31 | belongs_to(:resource_quantity, Measure, on_replace: :nilify) 32 | belongs_to(:effort_quantity, Measure, on_replace: :nilify) 33 | 34 | field(:has_beginning, :utc_datetime_usec) 35 | field(:has_end, :utc_datetime_usec) 36 | field(:has_point_in_time, :utc_datetime_usec) 37 | field(:due, :utc_datetime_usec) 38 | # for the field `created`, use Needle.ULID.timestamp/1 39 | 40 | field(:finished, :boolean, default: false) 41 | 42 | # should this be a virtual field? 
43 | field(:deletable, :boolean, default: false) 44 | 45 | field(:note, :string) 46 | field(:agreed_in, :string) 47 | 48 | # inScopeOf 49 | belongs_to(:context, Needle.Pointer) 50 | 51 | # belongs_to :clause_of, Agreement 52 | 53 | belongs_to(:at_location, Geolocation) 54 | 55 | # belongs_to :independent_demand_of, Plan 56 | 57 | belongs_to(:creator, ValueFlows.Util.user_schema()) 58 | 59 | field(:is_public, :boolean, virtual: true) 60 | field(:is_disabled, :boolean, virtual: true, default: false) 61 | field(:published_at, :utc_datetime_usec) 62 | field(:deleted_at, :utc_datetime_usec) 63 | field(:disabled_at, :utc_datetime_usec) 64 | 65 | timestamps(inserted_at: false) 66 | end 67 | 68 | @type attrs :: %{required(binary()) => term()} | %{required(atom()) => term()} 69 | 70 | @required ~w[action_id]a 71 | @cast @required ++ 72 | ~w[ 73 | action_id input_of_id output_of_id provider_id receiver_id 74 | resource_classified_as resource_conforms_to_id resource_inventoried_as_id 75 | resource_quantity_id effort_quantity_id 76 | has_beginning has_end has_point_in_time due 77 | finished note agreed_in 78 | context_id at_location_id 79 | deleted_at disabled_at 80 | ]a 81 | 82 | @spec create_changeset(struct(), attrs()) :: Changeset.t() 83 | def create_changeset(creator, attrs) do 84 | %__MODULE__{} 85 | |> Changeset.cast(attrs, @cast) 86 | |> Changeset.validate_required(@required) 87 | |> Changeset.change(is_public: true) 88 | |> Changeset.change(creator_id: creator.id) 89 | |> common_changeset(attrs) 90 | end 91 | 92 | @spec create_changeset(t(), attrs()) :: Changeset.t() 93 | def update_changeset(comm, attrs) do 94 | comm 95 | |> Changeset.cast(attrs, @cast) 96 | |> common_changeset(attrs) 97 | end 98 | 99 | @spec common_changeset(Chageset.t(), attrs()) :: Changeset.t() 100 | defp common_changeset(cset, attrs) do 101 | import Bonfire.Common.Repo.Utils, 102 | only: [change_public: 1, change_disabled: 1] 103 | 104 | cset 105 | |> ValueFlows.Util.change_measures(attrs, measure_fields()) 106 | |> change_public() 107 | |> change_disabled() 108 | |> Changeset.foreign_key_constraint( 109 | :at_location_id, 110 | name: :vf_commitment_at_location_id_fkey 111 | ) 112 | end 113 | 114 | def measure_fields(), 115 | do: [:resource_quantity, :effort_quantity] 116 | 117 | @behaviour Bonfire.Common.SchemaModule 118 | def context_module(), 119 | do: ValueFlows.Planning.Commitment.Commitments 120 | 121 | def query_module(), 122 | do: ValueFlows.Planning.Commitment.Queries 123 | 124 | def follow_filters(), 125 | do: [:default] 126 | end 127 | -------------------------------------------------------------------------------- /lib/planning/commitment/commitments.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Commitment.Commitments do 2 | import Bonfire.Common.Config, only: [repo: 0] 3 | 4 | use Bonfire.Common.Utils, 5 | only: [maybe: 2, e: 3, e: 4] 6 | 7 | alias ValueFlows.Knowledge.Action.Actions 8 | alias ValueFlows.Planning.Commitment 9 | alias ValueFlows.Planning.Commitment.Queries 10 | 11 | @typep attrs :: Commitment.attrs() 12 | 13 | def one(filters), 14 | do: repo().single(Queries.query(filters)) 15 | 16 | def by_id(id, user \\ nil), 17 | do: one([:default, user: user, id: id]) 18 | 19 | def many(filters \\ []), 20 | do: {:ok, repo().many(Queries.query(Commitment, filters))} 21 | 22 | def preload_all(comm) do 23 | {:ok, comm} = one(id: comm.id, preload: :all) 24 | preload_action(comm) 25 | end 26 | 27 | def preload_action(comm), 28 | do: 
Map.put(comm, :action, Actions.action!(comm.action_id)) 29 | 30 | @spec create(any(), attrs()) :: 31 | {:ok, Commitment.t()} | {:error, Changeset.t()} 32 | def create(creator, attrs) when is_map(attrs) do 33 | attrs = prep_attrs(attrs, creator) 34 | 35 | repo().transact_with(fn -> 36 | with {:ok, comm} <- 37 | Commitment.create_changeset(creator, attrs) |> repo().insert(), 38 | comm <- preload_all(%{comm | creator: creator}), 39 | {:ok, comm} <- ValueFlows.Util.try_tag_thing(nil, comm, attrs) do 40 | {:ok, comm} 41 | end 42 | end) 43 | end 44 | 45 | @spec update(struct(), String.t(), attrs()) :: 46 | {:ok, Commitment.t()} | {:error, any()} 47 | def update(user, id, changes) when is_binary(id) do 48 | with {:ok, comm} <- by_id(id, user) do 49 | do_update(comm, changes) 50 | end 51 | end 52 | 53 | @spec update(struct(), Commitment.t(), attrs()) :: 54 | {:ok, Commitment.t()} | {:error, any()} 55 | def update(user, comm, changes) do 56 | import ValueFlows.Util, only: [can?: 2] 57 | 58 | with :ok <- can?(user, comm) do 59 | do_update(comm, changes) 60 | end 61 | end 62 | 63 | @spec do_update(Commitment.t(), attrs()) :: 64 | {:ok, Commitment.t()} | {:error, any()} 65 | defp do_update(comm, attrs) do 66 | attrs = prep_attrs(attrs, Map.get(comm, :creator)) 67 | 68 | repo().transact_with(fn -> 69 | with {:ok, comm} <- 70 | Commitment.update_changeset(comm, attrs) |> repo().update(), 71 | comm <- preload_all(comm), 72 | {:ok, comm} <- ValueFlows.Util.try_tag_thing(nil, comm, attrs) do 73 | {:ok, comm} 74 | end 75 | end) 76 | end 77 | 78 | @spec soft_delete(struct(), String.t()) :: 79 | {:ok, Commitment.t()} | {:error, Changeset.t()} 80 | def soft_delete(id) when is_binary(id) do 81 | with {:ok, comm} <- by_id(id) do 82 | do_soft_delete(comm) 83 | end 84 | end 85 | 86 | @spec soft_delete(struct(), Commitment.t()) :: 87 | {:ok, Commitment.t()} | {:error, Changeset.t()} 88 | def soft_delete(comm, user) do 89 | import ValueFlows.Util, only: [can?: 3] 90 | 91 | with :ok <- can?(user, :delete, comm) do 92 | do_soft_delete(comm) 93 | end 94 | end 95 | 96 | @spec do_soft_delete(Commitment.t()) :: 97 | {:ok, Commitment.t()} | {:error, Chageset.t()} 98 | defp do_soft_delete(comm) do 99 | repo().transact_with(fn -> 100 | with {:ok, comm} <- Bonfire.Common.Repo.Delete.soft_delete(comm) do 101 | {:ok, comm} 102 | end 103 | end) 104 | end 105 | 106 | @spec prep_attrs(attrs(), struct()) :: attrs() 107 | def prep_attrs(attrs, creator \\ nil) do 108 | attrs 109 | |> Enums.maybe_put( 110 | :action_id, 111 | e(attrs, :action, :id, e(attrs, :action, nil)) 112 | |> ValueFlows.Knowledge.Action.Actions.id() 113 | ) 114 | |> Enums.maybe_put(:input_of_id, Enums.attr_get_id(attrs, :input_of)) 115 | |> Enums.maybe_put(:output_of_id, Enums.attr_get_id(attrs, :output_of)) 116 | |> Enums.maybe_put(:provider_id, Util.attr_get_agent(attrs, :provider, creator)) 117 | |> Enums.maybe_put(:receiver_id, Util.attr_get_agent(attrs, :receiver, creator)) 118 | |> Enums.maybe_put( 119 | :resource_conforms_to_id, 120 | Enums.attr_get_id(attrs, :resource_conforms_to) 121 | ) 122 | |> Enums.maybe_put( 123 | :resource_inventoried_as_id, 124 | Enums.attr_get_id(attrs, :resource_inventoried_as) 125 | ) 126 | |> Enums.maybe_put( 127 | :context_id, 128 | attrs |> Map.get(:in_scope_of) |> maybe(&List.first/1) 129 | ) 130 | |> Enums.maybe_put(:at_location_id, Enums.attr_get_id(attrs, :at_location)) 131 | |> ValueFlows.Util.parse_measurement_attrs(creator) 132 | end 133 | end 134 | 
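A minimal usage sketch for the Commitments context above (not part of the source file; it assumes a `creator` user struct, attrs shaped like the GraphQL commitment input, and that the "produce" action is defined):

    alias ValueFlows.Planning.Commitment.Commitments

    attrs = %{
      action: "produce",
      note: "Bake 10 loaves of bread",
      due: DateTime.utc_now(),
      finished: false
    }

    # Create the commitment as `creator`, then mark it as finished.
    with {:ok, commitment} <- Commitments.create(creator, attrs),
         {:ok, commitment} <- Commitments.update(creator, commitment.id, %{finished: true}) do
      commitment
    end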
-------------------------------------------------------------------------------- /lib/planning/commitment/graphql.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Bonfire.API.GraphQL) do 2 | defmodule ValueFlows.Planning.Commitment.GraphQL do 3 | import Bonfire.Common.Config, only: [repo: 0] 4 | 5 | alias Bonfire.API.GraphQL 6 | alias Bonfire.API.GraphQL.ResolveField 7 | 8 | alias ValueFlows.Planning.Commitment.Commitments 9 | alias ValueFlows.Planning.Satisfaction.Satisfactions 10 | 11 | def commitment(%{id: id}, info) do 12 | ResolveField.run(%ResolveField{ 13 | module: __MODULE__, 14 | fetcher: :fetch_commitment, 15 | context: id, 16 | info: info 17 | }) 18 | end 19 | 20 | def commitments_filtered(%{filter: filts} = args, _info) 21 | when is_map(filts) do 22 | limit = Map.get(args, :limit, 10) 23 | offset = Map.get(args, :start, 0) 24 | filts = Enum.reduce(filts, [limit: limit, offset: offset], &filter/2) 25 | Commitments.many([:default] ++ filts) 26 | end 27 | 28 | def commitments_filtered(args, _) do 29 | Commitments.many([ 30 | :default, 31 | limit: Map.get(args, :limit, 10), 32 | offset: Map.get(args, :start, 0) 33 | ]) 34 | end 35 | 36 | defp filter({:search_string, text}, acc) when is_binary(text), 37 | do: Keyword.put(acc, :search, text) 38 | 39 | defp filter({:action, id}, acc) when is_binary(id), 40 | do: Keyword.put(acc, :action_id, id) 41 | 42 | # TODO: startDate and endDate filters 43 | 44 | defp filter({:finished, finished?}, acc) when is_boolean(finished?), 45 | do: Keyword.put(acc, :status, (finished? && :closed) || :open) 46 | 47 | defp filter(_, acc), 48 | do: acc 49 | 50 | def fetch_commitment(info, id), 51 | do: Commitments.by_id(id, GraphQL.current_user(info)) 52 | 53 | def fetch_resource_inventoried_as_edge( 54 | %{resource_inventoried_as_id: id} = comm, 55 | _, 56 | _ 57 | ) 58 | when is_binary(id) do 59 | comm = repo().preload(comm, :resource_inventoried_as) 60 | {:ok, comm.resource_inventoried_as} 61 | end 62 | 63 | def fetch_resource_inventoried_as_edge(_, _, _), 64 | do: {:ok, nil} 65 | 66 | def fetch_input_of_edge(%{input_of_id: id} = comm, _, _) 67 | when is_binary(id) do 68 | comm = repo().preload(comm, :input_of) 69 | {:ok, comm.input_of} 70 | end 71 | 72 | def fetch_input_of_edge(_, _, _), 73 | do: {:ok, nil} 74 | 75 | def fetch_output_of_edge(%{output_of_id: id} = comm, _, _) 76 | when is_binary(id) do 77 | comm = repo().preload(comm, :output_of) 78 | {:ok, comm.output_of} 79 | end 80 | 81 | def fetch_output_of_edge(_, _, _), 82 | do: {:ok, nil} 83 | 84 | def fetch_created(%{id: id}, _, _) when is_binary(id), 85 | do: Needle.ULID.timestamp(id) 86 | 87 | def fetch_created(_, _, _), 88 | do: {:ok, nil} 89 | 90 | def fetch_satisfies_edge(%{id: id}, _, _) when is_binary(id), 91 | do: Satisfactions.many([:default, satisfied_by_id: id]) 92 | 93 | def fetch_satisfies_edge(_, _, _), 94 | do: {:ok, nil} 95 | 96 | def create_commitment(%{commitment: attrs}, info) do 97 | repo().transact_with(fn -> 98 | with {:ok, user} <- GraphQL.current_user_or_not_logged_in(info), 99 | {:ok, comm} <- Commitments.create(user, attrs) do 100 | {:ok, %{commitment: comm}} 101 | end 102 | end) 103 | end 104 | 105 | def update_commitment(%{commitment: %{id: id} = changes}, info) do 106 | with {:ok, user} <- GraphQL.current_user_or_not_logged_in(info), 107 | {:ok, comm} <- Commitments.update(user, id, changes) do 108 | {:ok, %{commitment: comm}} 109 | end 110 | end 111 | 112 | def delete_commitment(%{id: id}, info) do 113 | 
repo().transact_with(fn -> 114 | with {:ok, user} <- GraphQL.current_user_or_not_logged_in(info), 115 | {:ok, _} <- Commitments.soft_delete(id, user) do 116 | {:ok, true} 117 | end 118 | end) 119 | end 120 | end 121 | end 122 | -------------------------------------------------------------------------------- /lib/planning/commitment/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Commitment.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | 5 | import Needle.Migration 6 | 7 | alias ValueFlows.Planning.Commitment 8 | alias ValueFlows.Process 9 | alias ValueFlows.Knowledge.ResourceSpecification 10 | alias ValueFlows.EconomicResource 11 | alias Bonfire.Quantify.Measure 12 | alias Bonfire.Geolocate.Geolocation 13 | 14 | def up() do 15 | create_pointable_table(Commitment) do 16 | add(:action_id, :string, null: false) 17 | 18 | add(:input_of_id, weak_pointer(Process)) 19 | add(:output_of_id, weak_pointer(Process)) 20 | 21 | add(:provider_id, weak_pointer()) 22 | add(:receiver_id, weak_pointer()) 23 | 24 | add(:resource_conforms_to_id, weak_pointer(ResourceSpecification)) 25 | add(:resource_inventoried_as_id, weak_pointer(EconomicResource)) 26 | 27 | add(:resource_quantity_id, weak_pointer(Measure)) 28 | add(:effort_quantity_id, weak_pointer(Measure)) 29 | 30 | add(:has_beginning, :timestamptz) 31 | add(:has_end, :timestamptz) 32 | add(:has_point_in_time, :timestamptz) 33 | add(:due, :timestamptz) 34 | 35 | add(:finished, :boolean, default: false, null: false) 36 | add(:deletable, :boolean, default: false, null: false) 37 | add(:note, :text) 38 | add(:agreed_in, :string) 39 | 40 | # inScopeOf 41 | add(:context_id, weak_pointer()) 42 | 43 | # add :clause_of_id, week_pointer(Agreement) 44 | 45 | add(:at_location_id, weak_pointer(Geolocation)) 46 | 47 | # add :independent_demand_of_id, week_pointer(Plan) 48 | 49 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema())) 50 | 51 | add(:published_at, :timestamptz) 52 | add(:deleted_at, :timestamptz) 53 | add(:disabled_at, :timestamptz) 54 | 55 | timestamps(inserted_at: false, type: :utc_datetime_usec) 56 | end 57 | end 58 | 59 | def down(), 60 | do: drop_pointable_table(Commitment) 61 | end 62 | -------------------------------------------------------------------------------- /lib/planning/intent/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Intent.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | # alias Needle.ULID 5 | import Needle.Migration 6 | 7 | alias ValueFlows.Knowledge.ResourceSpecification 8 | alias ValueFlows.EconomicResource 9 | alias ValueFlows.Process 10 | # alias ValueFlows.Proposal 11 | 12 | defp intent_table(), do: ValueFlows.Planning.Intent.__schema__(:source) 13 | 14 | def up do 15 | create_pointable_table(ValueFlows.Planning.Intent) do 16 | add(:name, :string) 17 | add(:note, :text) 18 | 19 | # array of URI 20 | # add(:resource_classified_as, {:array, :string}) 21 | 22 | add(:action_id, :string) 23 | 24 | add(:image_id, weak_pointer(ValueFlows.Util.image_schema()), null: true) 25 | 26 | add(:provider_id, weak_pointer(), null: true) 27 | add(:receiver_id, weak_pointer(), null: true) 28 | 29 | add(:at_location_id, weak_pointer(Bonfire.Geolocate.Geolocation), null: true) 30 | 31 | add(:available_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 32 | 33 | add(:resource_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: 
true) 34 | 35 | add(:effort_quantity_id, weak_pointer(Bonfire.Quantify.Measure), null: true) 36 | 37 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 38 | 39 | # optional context as scope 40 | add(:context_id, weak_pointer(), null: true) 41 | 42 | add(:finished, :boolean, default: false) 43 | 44 | # # field(:deletable, :boolean) # TODO - virtual field? how is it calculated? 45 | 46 | # belongs_to(:agreed_in, Agreement) 47 | 48 | # inverse relationships 49 | # has_many(:published_in, ProposedIntent) 50 | # has_many(:satisfied_by, Satisfaction) 51 | 52 | add(:has_beginning, :timestamptz) 53 | add(:has_end, :timestamptz) 54 | add(:has_point_in_time, :timestamptz) 55 | add(:due, :timestamptz) 56 | 57 | add(:published_at, :timestamptz) 58 | add(:deleted_at, :timestamptz) 59 | add(:disabled_at, :timestamptz) 60 | 61 | timestamps(inserted_at: false, type: :utc_datetime_usec) 62 | end 63 | end 64 | 65 | def add_references do 66 | table = intent_table() 67 | 68 | # needed to avoid error: constraint x for relation "vf_intent" already exists 69 | execute("ALTER TABLE #{table} DROP CONSTRAINT IF EXISTS vf_intent_input_of_id_fkey;") 70 | execute("ALTER TABLE #{table} DROP CONSTRAINT IF EXISTS vf_intent_output_of_id_fkey;") 71 | 72 | execute( 73 | "ALTER TABLE #{table} DROP CONSTRAINT IF EXISTS vf_intent_resource_conforms_to_id_fkey;" 74 | ) 75 | 76 | execute( 77 | "ALTER TABLE #{table} DROP CONSTRAINT IF EXISTS vf_intent_resource_inventoried_as_id_fkey;" 78 | ) 79 | 80 | alter table(table) do 81 | add_if_not_exists(:input_of_id, weak_pointer(Process), null: true) 82 | add_if_not_exists(:output_of_id, weak_pointer(Process), null: true) 83 | 84 | add_if_not_exists( 85 | :resource_conforms_to_id, 86 | weak_pointer(ResourceSpecification), 87 | null: true 88 | ) 89 | 90 | add_if_not_exists( 91 | :resource_inventoried_as_id, 92 | weak_pointer(EconomicResource), 93 | null: true 94 | ) 95 | end 96 | end 97 | 98 | def down do 99 | drop_pointable_table(ValueFlows.Planning.Intent) 100 | end 101 | end 102 | -------------------------------------------------------------------------------- /lib/planning/satisfaction/graphql.ex: -------------------------------------------------------------------------------- 1 | if Code.ensure_loaded?(Bonfire.API.GraphQL) do 2 | defmodule ValueFlows.Planning.Satisfaction.GraphQL do 3 | import Bonfire.API.GraphQL, only: [current_user_or_not_logged_in: 1] 4 | import Bonfire.Common.Config, only: [repo: 0] 5 | 6 | alias ValueFlows.Planning.Satisfaction 7 | alias ValueFlows.Planning.Satisfaction.Satisfactions 8 | 9 | alias ValueFlows.Planning.Commitment 10 | alias ValueFlows.EconomicEvent 11 | 12 | def satisfaction(%{id: id}, info) do 13 | alias Bonfire.API.GraphQL.ResolveField 14 | 15 | ResolveField.run(%ResolveField{ 16 | module: __MODULE__, 17 | fetcher: :fetch_satisfaction, 18 | context: id, 19 | info: info 20 | }) 21 | end 22 | 23 | def satisfactions_filtered(args, _info) do 24 | limit = Map.get(args, :limit, 10) 25 | offset = Map.get(args, :start, 0) 26 | Satisfactions.many([:default, limit: limit, offset: offset]) 27 | end 28 | 29 | def fetch_satisfaction(info, id) do 30 | import Bonfire.API.GraphQL, only: [current_user: 1] 31 | 32 | Satisfactions.by_id(id, current_user(info)) 33 | end 34 | 35 | def fetch_satisfies_edge(%{satisfies_id: id} = satis, _, _) 36 | when is_binary(id) do 37 | satis = repo().preload(satis, :satisfies) 38 | {:ok, Map.get(satis, :satisfies, nil)} 39 | end 40 | 41 | def fetch_satisfies_edge(_, _, _), 42 | do: {:ok, nil} 43 | 44 | def 
fetch_satisfied_by_edge(%Satisfaction{} = satis, _, _) do 45 | %{satisfied_by: satis_by} = repo().preload(satis, :satisfied_by) 46 | {:ok, Bonfire.Common.Needles.get(satis_by)} 47 | end 48 | 49 | def fetch_satisfied_by_edge(_, _, _), 50 | do: {:ok, nil} 51 | 52 | def event_or_commitment_resolve_type(%EconomicEvent{}, _), 53 | do: :economic_event 54 | 55 | def event_or_commitment_resolve_type(%Commitment{}, _), 56 | do: :commitment 57 | 58 | def event_or_commitment_resolve_type(_, _), 59 | do: nil 60 | 61 | def create(%{satisfaction: attrs}, info) do 62 | repo().transact_with(fn -> 63 | with {:ok, user} <- current_user_or_not_logged_in(info), 64 | {:ok, satis} <- Satisfactions.create(user, attrs) do 65 | {:ok, %{satisfaction: satis}} 66 | end 67 | end) 68 | end 69 | 70 | def update(%{satisfaction: %{id: id} = changes}, info) do 71 | with {:ok, user} <- current_user_or_not_logged_in(info), 72 | {:ok, satis} <- Satisfactions.update(user, id, changes) do 73 | {:ok, %{satisfaction: satis}} 74 | end 75 | end 76 | 77 | def delete(%{id: id}, info) do 78 | with {:ok, user} <- current_user_or_not_logged_in(info), 79 | {:ok, _} <- Satisfactions.soft_delete(id, user) do 80 | {:ok, true} 81 | end 82 | end 83 | end 84 | end 85 | -------------------------------------------------------------------------------- /lib/planning/satisfaction/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Satisfaction.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | 5 | import Needle.Migration 6 | 7 | alias ValueFlows.Planning.Intent 8 | alias ValueFlows.Planning.Satisfaction 9 | 10 | alias ValueFlows.EconomicEvent 11 | alias Bonfire.Quantify.Measure 12 | 13 | def up() do 14 | create_pointable_table(Satisfaction) do 15 | add(:satisfies_id, weak_pointer(Intent)) 16 | # EconomicEvent or Commitment 17 | add(:satisfied_by_id, weak_pointer()) 18 | 19 | add(:resource_quantity_id, weak_pointer(Measure)) 20 | add(:effort_quantity_id, weak_pointer(Measure)) 21 | 22 | add(:note, :text) 23 | 24 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema())) 25 | 26 | add(:published_at, :timestamptz) 27 | add(:deleted_at, :timestamptz) 28 | add(:disabled_at, :timestamptz) 29 | 30 | timestamps(inserted_at: false, type: :utc_datetime_usec) 31 | end 32 | end 33 | 34 | def down(), 35 | do: drop_pointable_table(Satisfaction) 36 | end 37 | -------------------------------------------------------------------------------- /lib/planning/satisfaction/satisfaction.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Satisfaction do 2 | use Needle.Pointable, 3 | otp_app: :bonfire, 4 | source: "vf_satisfaction", 5 | table_id: "1AT1SFACT10N4F8994AD427E7B" 6 | 7 | alias Ecto.Changeset 8 | alias ValueFlows.EconomicEvent 9 | alias ValueFlows.Planning.Intent 10 | alias ValueFlows.Planning.Commitment 11 | 12 | alias Bonfire.Quantify.Measure 13 | alias Needle.Pointer 14 | 15 | # @type t :: %__MODULE__{ 16 | # id: String.t(), 17 | # satisfies: Intent.t(), 18 | # satisfies_id: String.t(), 19 | # satisfied_by: EconomicEvent.t() | Commitment.t(), 20 | # satisfied_by_id: String.t(), 21 | # resource_quantity: Measure.t(), 22 | # resource_quantity_id: String.t(), 23 | # effort_quantity: Measure.t(), 24 | # effort_quantity: String.t(), 25 | # note: String.t(), 26 | # creator: struct(), 27 | # creator_id: String.t(), 28 | # is_public: boolean(), 29 | # is_disabled: boolean(), 30 | # published_at: DateTime.t(), 
31 | # deleted_at: DateTime.t(), 32 | # disabled_at: DateTime.t(), 33 | # updated_at: DateTime.t() 34 | # } 35 | 36 | pointable_schema do 37 | belongs_to(:satisfies, Intent) 38 | # Commitment or EconomicEvent 39 | belongs_to(:satisfied_by, Pointer) 40 | belongs_to(:resource_quantity, Measure, on_replace: :nilify) 41 | belongs_to(:effort_quantity, Measure, on_replace: :nilify) 42 | 43 | field(:note, :string) 44 | 45 | belongs_to(:creator, ValueFlows.Util.user_schema()) 46 | 47 | field(:is_public, :boolean, virtual: true) 48 | field(:is_disabled, :boolean, virtual: true, default: false) 49 | field(:published_at, :utc_datetime_usec) 50 | field(:deleted_at, :utc_datetime_usec) 51 | field(:disabled_at, :utc_datetime_usec) 52 | 53 | timestamps(inserted_at: false) 54 | end 55 | 56 | @type attrs :: %{required(binary()) => term()} | %{required(atom()) => term()} 57 | 58 | @reqr ~w[satisfies_id satisfied_by_id]a 59 | @cast @reqr ++ 60 | ~w[ 61 | resource_quantity_id effort_quantity_id note 62 | disabled_at 63 | ]a 64 | 65 | @spec create_changeset(struct(), attrs()) :: Changeset.t() 66 | def create_changeset(creator, attrs) do 67 | %__MODULE__{} 68 | |> Changeset.cast(attrs, @cast) 69 | |> Changeset.validate_required(@reqr) 70 | |> Changeset.change(is_public: true, creator_id: creator.id) 71 | |> common_changeset(attrs) 72 | end 73 | 74 | @spec update_changeset(t(), attrs()) :: Changeset.t() 75 | def update_changeset(satis, attrs) do 76 | satis 77 | |> Changeset.cast(attrs, @cast) 78 | |> common_changeset(attrs) 79 | end 80 | 81 | @spec common_changeset(Chageset.t(), attrs()) :: Changeset.t() 82 | defp common_changeset(cset, attrs) do 83 | import Bonfire.Common.Repo.Utils, 84 | only: [change_public: 1, change_disabled: 1] 85 | 86 | cset 87 | |> ValueFlows.Util.change_measures(attrs, measure_fields()) 88 | |> change_public() 89 | |> change_disabled() 90 | end 91 | 92 | def measure_fields(), 93 | do: [:resource_quantity, :effort_quantity] 94 | 95 | @behaviour Bonfire.Common.SchemaModule 96 | def context_module(), 97 | do: ValueFlows.Planning.Satisfaction.Satisfactions 98 | 99 | def query_module(), 100 | do: ValueFlows.Planning.Satisfaction.Queries 101 | 102 | def follow_filters(), 103 | do: [:default] 104 | end 105 | -------------------------------------------------------------------------------- /lib/planning/satisfaction/satisfaction_queries.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Satisfaction.Queries do 2 | import Ecto.Query 3 | import Untangle 4 | 5 | alias ValueFlows.Planning.Satisfaction 6 | 7 | def query(Satisfaction), 8 | do: from(s in Satisfaction, as: :satisfaction) 9 | 10 | def query(:count), 11 | do: from(s in Satisfaction, as: :satisfaction) 12 | 13 | def query(filters), 14 | do: query(Satisfaction, filters) 15 | 16 | def query(q, filters), 17 | do: filter(query(q), filters) 18 | 19 | def queries(query, _page_opts, base_filters, data_filters, count_filters) do 20 | base_q = query(query, base_filters) 21 | data_q = filter(base_q, data_filters) 22 | count_q = filter(base_q, count_filters) 23 | {data_q, count_q} 24 | end 25 | 26 | def join_to(q, spec, join_qualifier \\ :left) 27 | 28 | def join_to(q, specs, jq) when is_list(specs), 29 | do: Enum.reduce(specs, q, &join_to(&2, &1, jq)) 30 | 31 | def join_to(q, :effort_quantity, jq), 32 | do: join(q, jq, [satisfaction: s], q in assoc(s, :effort_quantity), as: :effort_quantity) 33 | 34 | def join_to(q, :resource_quantity, jq), 35 | do: join(q, jq, [satisfaction: s], q 
in assoc(s, :resource_quantity), as: :resource_quantity) 36 | 37 | # filter 38 | def filter(q, filters) when is_list(filters), 39 | do: Enum.reduce(filters, q, &filter(&2, &1)) 40 | 41 | def filter(q, :default), 42 | do: filter(q, [:deleted, order: :default, preload: :quantities]) 43 | 44 | def filter(q, :deleted), 45 | do: where(q, [satisfaction: s], is_nil(s.deleted_at)) 46 | 47 | def filter(q, :deleted, true), 48 | do: where(q, [satisfaction: s], not is_nil(s.deleted_at)) 49 | 50 | def filter(q, :disabled), 51 | do: where(q, [satisfaction: s], is_nil(s.disabled_at)) 52 | 53 | def filter(q, :private), 54 | do: where(q, [satisfaction: s], not is_nil(s.published_at)) 55 | 56 | def filter(q, {:status, :open}), 57 | do: where(q, [satisfaction: s], s.finished == false) 58 | 59 | def filter(q, {:status, :closed}), 60 | do: where(q, [satisfaction: s], s.finished == true) 61 | 62 | # search 63 | def filter(q, {:search, text}), 64 | do: where(q, [satisfaction: s], ilike(s.note, ^"%#{text}%")) 65 | 66 | # user 67 | def filter(q, {:user, %{id: user_id}}) do 68 | q 69 | |> where( 70 | [satisfaction: s], 71 | not is_nil(s.published_at) or s.creator_id == ^user_id 72 | ) 73 | |> filter([:disabled]) 74 | end 75 | 76 | # field 77 | def filter(q, {:id, id}) when is_binary(id), 78 | do: where(q, [satisfaction: s], s.id == ^id) 79 | 80 | def filter(q, {:id, ids}) when is_list(ids), 81 | do: where(q, [satisfaction: s], s.id in ^ids) 82 | 83 | def filter(q, {:satisfies_id, id}), 84 | do: where(q, [satisfaction: s], s.satisfies_id == ^id) 85 | 86 | def filter(q, {:satisfied_by_id, id}), 87 | do: where(q, [satisfaction: s], s.satisfied_by_id == ^id) 88 | 89 | # order 90 | def filter(q, {:order, :default}), 91 | do: order_by(q, [satisfaction: s], desc: s.updated_at, asc: s.id) 92 | 93 | def filter(q, {:order, [desc: key]}), 94 | do: order_by(q, [satisfaction: s], desc: field(s, ^key)) 95 | 96 | def filter(q, {:order, [asc: key]}), 97 | do: order_by(q, [satisfaction: s], asc: field(s, ^key)) 98 | 99 | def filter(q, {:order, key}), 100 | do: filter(q, order: [desc: key]) 101 | 102 | # group and count 103 | def filter(q, {:group_count, key}), 104 | do: filter(q, group: key, count: key) 105 | 106 | def filter(q, {:group, key}), 107 | do: group_by(q, [satisfaction: s], field(s, ^key)) 108 | 109 | def filter(q, {:count, key}), 110 | do: select(q, [satisfaction: s], {field(s, ^key), count(s.id)}) 111 | 112 | def filter(q, {:preload, :all}) do 113 | q 114 | |> preload([:satisfies, :satisfied_by]) 115 | |> filter({:preload, :quantities}) 116 | end 117 | 118 | def filter(q, {:preload, :quantities}) do 119 | q 120 | |> join_to([:effort_quantity, :resource_quantity]) 121 | |> preload([:effort_quantity, :resource_quantity]) 122 | end 123 | 124 | # pagination 125 | def filter(q, {:offset, offset}), 126 | do: offset(q, ^offset) 127 | 128 | def filter(q, {:limit, limit}), 129 | do: limit(q, ^limit) 130 | 131 | def filter(q, {:paginate_id, %{after: a, limit: limit}}) do 132 | limit = limit + 2 133 | 134 | q 135 | |> where([satisfaction: s], s.id >= ^a) 136 | |> limit(^limit) 137 | end 138 | 139 | def filter(q, {:paginate_id, %{before: b, limit: limit}}) do 140 | q 141 | |> where([satisfaction: s], s.id <= ^b) 142 | |> filter(limit: limit + 2) 143 | end 144 | 145 | def filter(q, {:paginate_id, %{limit: limit}}), 146 | do: filter(q, limit: limit + 1) 147 | 148 | def filter(q, other_filter), 149 | do: ValueFlows.Util.common_filters(q, other_filter) 150 | end 151 | 
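These filter clauses compose as a plain keyword list, which is how the GraphQL resolvers and the Satisfactions context (next file) build their queries. A hedged sketch, assuming `current_user` and `intent_id` bindings:

    # Satisfactions of a given intent that `current_user` is allowed to see,
    # with quantities preloaded via the :default preset.
    {:ok, satisfactions} =
      ValueFlows.Planning.Satisfaction.Satisfactions.many([
        :default,
        user: current_user,
        satisfies_id: intent_id,
        limit: 10
      ])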
-------------------------------------------------------------------------------- /lib/planning/satisfaction/satisfactions.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Planning.Satisfaction.Satisfactions do 2 | import Bonfire.Common.Config, only: [repo: 0] 3 | use Bonfire.Common.Utils 4 | alias Bonfire.Common.Enums 5 | 6 | alias Ecto.Changeset 7 | alias ValueFlows.Planning.Satisfaction 8 | alias ValueFlows.Planning.Satisfaction.Queries 9 | 10 | @typep attrs :: Satisfaction.attrs() 11 | 12 | def one(filters), 13 | do: repo().single(Queries.query(filters)) 14 | 15 | def by_id(id, user \\ nil), 16 | do: one([:default, user: user, id: id]) 17 | 18 | def preload_all(%{id: id}) do 19 | {:ok, satis} = one(id: id, preload: :all) 20 | satis 21 | end 22 | 23 | def many(filters \\ []), 24 | do: {:ok, repo().many(Queries.query(filters))} 25 | 26 | @spec create(struct(), attrs()) :: 27 | {:ok, Satisfaction.t()} | {:error, Changeset.t()} 28 | def create(creator, attrs) do 29 | attrs = prep_attrs(attrs, creator) 30 | 31 | repo().transact_with(fn -> 32 | with {:ok, satis} <- 33 | Satisfaction.create_changeset(creator, attrs) |> repo().insert(), 34 | satis = preload_all(%{satis | creator: creator}) do 35 | {:ok, satis} 36 | end 37 | end) 38 | end 39 | 40 | @spec update(struct(), String.t(), attrs()) :: 41 | {:ok, Satisfaction.t()} | {:error, any()} 42 | def update(user, id, changes) when is_binary(id) do 43 | with {:ok, satis} <- by_id(id, user) do 44 | do_update(satis, changes) 45 | end 46 | end 47 | 48 | @spec update(struct(), Satisfaction.t(), attrs()) :: 49 | {:ok, Satisfaction.t()} | {:error, any()} 50 | def update(user, satis, changes) do 51 | import ValueFlows.Util, only: [can?: 2] 52 | 53 | with :ok <- can?(user, satis) do 54 | do_update(satis, changes) 55 | end 56 | end 57 | 58 | @spec do_update(Satisfaction.t(), attrs()) :: 59 | {:ok, Satisfaction.t()} | {:error, any()} 60 | defp do_update(satis, attrs) do 61 | attrs = prep_attrs(attrs, Map.get(satis, :creator)) 62 | 63 | repo().transact_with(fn -> 64 | with {:ok, satis} <- 65 | repo().update(Satisfaction.update_changeset(satis, attrs)) do 66 | satis = preload_all(satis) 67 | {:ok, satis} 68 | end 69 | end) 70 | end 71 | 72 | @spec soft_delete(struct(), String.t()) :: 73 | {:ok, Satisfaction.t()} | {:error, Changeset.t()} 74 | def soft_delete(id, user) when is_binary(id) do 75 | with {:ok, satis} <- by_id(id, user) do 76 | do_soft_delete(satis) 77 | end 78 | end 79 | 80 | @spec soft_delete(struct(), Satisfaction.t()) :: 81 | {:ok, Satisfaction.t()} | {:error, Changeset.t()} 82 | def soft_delete(satis, user) do 83 | import ValueFlows.Util, only: [can?: 3] 84 | 85 | with :ok <- can?(user, :delete, satis) do 86 | do_soft_delete(satis) 87 | end 88 | end 89 | 90 | @spec do_soft_delete(Satisfaction.t()) :: 91 | {:ok, Satisfaction.t()} | {:error, Chageset.t()} 92 | defp do_soft_delete(satis) do 93 | repo().transact_with(fn -> 94 | with {:ok, satis} <- Bonfire.Common.Repo.Delete.soft_delete(satis) do 95 | {:ok, satis} 96 | end 97 | end) 98 | end 99 | 100 | @spec prep_attrs(attrs(), struct()) :: attrs() 101 | defp prep_attrs(attrs, creator) do 102 | attrs 103 | |> Enums.maybe_put(:satisfies_id, Enums.attr_get_id(attrs, :satisfies)) 104 | |> Enums.maybe_put(:satisfied_by_id, Enums.attr_get_id(attrs, :satisfied_by)) 105 | |> ValueFlows.Util.parse_measurement_attrs(creator) 106 | end 107 | end 108 | -------------------------------------------------------------------------------- 
/lib/process/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Process.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | # alias Needle.ULID 5 | import Needle.Migration 6 | 7 | # alias ValueFlows.Process 8 | alias ValueFlows.Knowledge.ProcessSpecification 9 | 10 | # defp resource_table(), do: EconomicResource.__schema__(:source) 11 | 12 | def up do 13 | create_pointable_table(ValueFlows.Process) do 14 | add(:name, :string) 15 | add(:note, :text) 16 | 17 | # add(:image_id, weak_pointer(ValueFlows.Util.image_schema()), null: true) 18 | 19 | add(:has_beginning, :timestamptz) 20 | add(:has_end, :timestamptz) 21 | 22 | add(:finished, :boolean, default: false) 23 | 24 | # add(:resource_classified_as, {:array, :string}, virtual: true) 25 | 26 | add(:based_on_id, weak_pointer(ProcessSpecification), null: true) 27 | 28 | # optional context as in_scope_of 29 | add(:context_id, weak_pointer(), null: true) 30 | 31 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 32 | 33 | add(:published_at, :timestamptz) 34 | add(:deleted_at, :timestamptz) 35 | add(:disabled_at, :timestamptz) 36 | 37 | timestamps(inserted_at: false, type: :utc_datetime_usec) 38 | end 39 | end 40 | 41 | def down do 42 | drop_pointable_table(ValueFlows.Process) 43 | end 44 | end 45 | -------------------------------------------------------------------------------- /lib/process/process.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Process do 2 | use Needle.Pointable, 3 | otp_app: :bonfire_valueflows, 4 | source: "vf_process", 5 | table_id: "4AYF0R1NPVTST0BEC0ME0VTPVT" 6 | 7 | import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 8 | 9 | alias Ecto.Changeset 10 | 11 | alias ValueFlows.Process 12 | # alias Bonfire.Quantify.Measure 13 | 14 | # alias ValueFlows.Knowledge.Action 15 | alias ValueFlows.Knowledge.ProcessSpecification 16 | alias ValueFlows.Planning.Intent 17 | alias ValueFlows.EconomicEvent 18 | 19 | # @type t :: %__MODULE__{} 20 | 21 | pointable_schema do 22 | field(:name, :string) 23 | field(:note, :string) 24 | 25 | # belongs_to(:image, Bonfire.Files.Media) 26 | 27 | field(:has_beginning, :utc_datetime_usec) 28 | field(:has_end, :utc_datetime_usec) 29 | 30 | field(:finished, :boolean, default: false) 31 | 32 | field(:classified_as, {:array, :string}, virtual: true) 33 | 34 | belongs_to(:based_on, ProcessSpecification) 35 | 36 | belongs_to(:context, Needle.Pointer) 37 | 38 | has_many(:intended_inputs, Intent, 39 | foreign_key: :input_of_id, 40 | references: :id 41 | ) 42 | 43 | has_many(:intended_outputs, Intent, 44 | foreign_key: :output_of_id, 45 | references: :id 46 | ) 47 | 48 | has_many(:trace, EconomicEvent, foreign_key: :input_of_id, references: :id) 49 | has_many(:inputs, EconomicEvent, foreign_key: :input_of_id, references: :id) 50 | 51 | has_many(:track, EconomicEvent, foreign_key: :output_of_id, references: :id) 52 | 53 | has_many(:outputs, EconomicEvent, 54 | foreign_key: :output_of_id, 55 | references: :id 56 | ) 57 | 58 | # TODO 59 | # workingAgents: [Agent!] 60 | # unplannedEconomicEvents(action: ID): [EconomicEvent!] 61 | 62 | # nextProcesses: [Process!] 63 | # previousProcesses: [Process!] 64 | 65 | # committedInputs(action: ID): [Commitment!] 66 | # committedOutputs(action: ID): [Commitment!] 67 | 68 | # plannedWithin: Plan 69 | # nestedIn: Scenario 70 | # field(:deletable, :boolean) # TODO - virtual field? 
how is it calculated? 71 | 72 | belongs_to(:creator, ValueFlows.Util.user_schema()) 73 | 74 | field(:is_public, :boolean, virtual: true) 75 | field(:published_at, :utc_datetime_usec) 76 | field(:is_disabled, :boolean, virtual: true, default: false) 77 | field(:disabled_at, :utc_datetime_usec) 78 | field(:deleted_at, :utc_datetime_usec) 79 | 80 | timestamps(inserted_at: false) 81 | end 82 | 83 | @required ~w(name is_public)a 84 | @cast @required ++ 85 | ~w(note has_beginning has_end finished is_disabled context_id based_on_id)a 86 | 87 | def validate_changeset(attrs \\ %{}) do 88 | %Process{} 89 | |> Changeset.cast(attrs, @cast) 90 | |> Changeset.change(is_public: true) 91 | |> Changeset.validate_required(@required) 92 | |> common_changeset() 93 | end 94 | 95 | def create_changeset( 96 | %{} = creator, 97 | attrs 98 | ) do 99 | attrs 100 | |> validate_changeset() 101 | |> Changeset.change(creator_id: creator.id) 102 | end 103 | 104 | def create_changeset( 105 | _, 106 | attrs 107 | ) do 108 | validate_changeset(attrs) 109 | end 110 | 111 | def update_changeset(%Process{} = process, attrs) do 112 | process 113 | |> Changeset.cast(attrs, @cast) 114 | |> common_changeset() 115 | end 116 | 117 | defp common_changeset(changeset) do 118 | changeset 119 | |> change_public() 120 | |> change_disabled() 121 | end 122 | 123 | @behaviour Bonfire.Common.SchemaModule 124 | def context_module, do: ValueFlows.Process.Processes 125 | def query_module, do: ValueFlows.Process.Queries 126 | 127 | def follow_filters, do: [:default] 128 | end 129 | -------------------------------------------------------------------------------- /lib/proposal/_attempts.ex: -------------------------------------------------------------------------------- 1 | # def ap_object_format_attempt1(obj) do 2 | # obj = preloads(obj) 3 | # 4 | # # image = ValueFlows.Util.image_url(obj) 5 | 6 | # Map.merge( 7 | # %{ 8 | # "type" => "ValueFlows:Proposal", 9 | # # "canonicalUrl" => obj.canonical_url, 10 | # # "icon" => icon, 11 | # "published" => obj.has_beginning 12 | # }, 13 | # keys_transform(obj, "to_string") 14 | # ) 15 | # end 16 | 17 | # def graphql_get_proposal_attempt2(id) do 18 | # query = 19 | # Grumble.PP.to_string( 20 | # Grumble.field( 21 | # :proposal, 22 | # args: [id: Grumble.var(:id)], 23 | # fields: ValueFlows.Simulate.proposal_fields(eligible_location: [:name]) 24 | # ) 25 | # ) 26 | # |> IO.inspect() 27 | 28 | # with {:ok, g} <- 29 | # """ 30 | # query ($id: ID) { 31 | # #{query} 32 | # } 33 | # """ 34 | # |> Absinthe.run(@schema, variables: %{"id" => id}) do 35 | # g |> Map.get(:data) |> Map.get("proposal") 36 | # end 37 | # end 38 | 39 | # def ap_object_prepare_attempt2(id) do 40 | # with obj <- graphql_get_proposal_attempt2(id) do 41 | # Map.merge( 42 | # %{ 43 | # "type" => "ValueFlows:Proposal" 44 | # # "canonicalUrl" => obj.canonical_url, 45 | # # "icon" => icon, 46 | # # "published" => obj.hasBeginning 47 | # }, 48 | # obj 49 | # ) 50 | # end 51 | # end 52 | 53 | # def graphql_document_for(schema, type, nesting, override_fun \\ []) do 54 | # schema 55 | # |> Bonfire.API.GraphQL.QueryHelper.fields_for(type, nesting) 56 | # # |> IO.inspect() 57 | # |> Bonfire.API.GraphQL.QueryHelper.apply_overrides(override_fun) 58 | # |> Bonfire.API.GraphQL.QueryHelper.format_fields(type, 10, schema) 59 | # |> List.to_string() 60 | # end 61 | 62 | # def graphql_get_proposal_attempt3(id) do 63 | # query = Bonfire.API.GraphQL.QueryHelper.document_for(@schema, :proposal, 4, &fields_filter/1) 64 | # IO.inspect(query) 65 | 66 | # with {:ok, 
g} <- 67 | # """ 68 | # query ($id: ID) { 69 | # proposal(id: $id) { 70 | # #{query} 71 | # } 72 | # } 73 | # """ 74 | # |> Absinthe.run(@schema, variables: %{"id" => id}) do 75 | # IO.inspect(g) 76 | # g |> Map.get(:data) |> Map.get("proposal") 77 | # end 78 | # end 79 | -------------------------------------------------------------------------------- /lib/proposal/migrations.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal.Migrations do 2 | @moduledoc false 3 | use Ecto.Migration 4 | import Needle.Migration 5 | 6 | def up do 7 | create_pointable_table(ValueFlows.Proposal) do 8 | add(:name, :string) 9 | add(:note, :text) 10 | 11 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 12 | 13 | add(:eligible_location_id, weak_pointer(Bonfire.Geolocate.Geolocation), null: true) 14 | 15 | # optional context as scope 16 | add(:context_id, weak_pointer(), null: true) 17 | 18 | add(:unit_based, :boolean, default: false) 19 | 20 | add(:has_beginning, :timestamptz) 21 | add(:has_end, :timestamptz) 22 | add(:created, :timestamptz) 23 | 24 | add(:published_at, :timestamptz) 25 | add(:deleted_at, :timestamptz) 26 | add(:disabled_at, :timestamptz) 27 | 28 | timestamps(inserted_at: false, type: :utc_datetime_usec) 29 | end 30 | 31 | create_pointable_table(ValueFlows.Proposal.ProposedIntent) do 32 | # Note: null allowed 33 | add(:reciprocal, :boolean, null: true) 34 | add(:deleted_at, :timestamptz) 35 | 36 | add(:publishes_id, strong_pointer(ValueFlows.Planning.Intent), null: false) 37 | 38 | add(:published_in_id, strong_pointer(ValueFlows.Proposal), null: false) 39 | end 40 | 41 | create_pointable_table(ValueFlows.Proposal.ProposedTo) do 42 | add(:deleted_at, :timestamptz) 43 | add(:proposed_to_id, weak_pointer(), null: false) 44 | add(:proposed_id, weak_pointer(), null: false) 45 | end 46 | end 47 | 48 | def down do 49 | drop_pointable_table(ValueFlows.Proposal.ProposedTo) 50 | drop_pointable_table(ValueFlows.Proposal.ProposedIntent) 51 | drop_pointable_table(ValueFlows.Proposal) 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /lib/proposal/proposal.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal do 2 | @moduledoc """ 3 | Schema for proposal, using `Needle.Pointable` 4 | """ 5 | use Needle.Pointable, 6 | otp_app: :bonfire_valueflows, 7 | source: "vf_proposal", 8 | table_id: "6R0P0SA11SMADE0FTW01NTENTS" 9 | 10 | import Bonfire.Common.Repo.Utils, only: [change_public: 1, change_disabled: 1] 11 | alias Ecto.Changeset 12 | 13 | alias ValueFlows.Proposal 14 | alias ValueFlows.Proposal.ProposedIntent 15 | alias ValueFlows.Proposal.ProposedTo 16 | 17 | alias ValueFlows.Planning.Intent 18 | 19 | # @type t :: %__MODULE__{} 20 | 21 | pointable_schema do 22 | field(:name, :string) 23 | field(:note, :string) 24 | 25 | field(:created, :utc_datetime_usec) 26 | 27 | field(:has_beginning, :utc_datetime_usec) 28 | field(:has_end, :utc_datetime_usec) 29 | 30 | # TODO: should be the same as has_beginning? 
31 | field(:published_at, :utc_datetime_usec) 32 | field(:is_public, :boolean, virtual: true) 33 | 34 | field(:is_disabled, :boolean, virtual: true, default: false) 35 | field(:disabled_at, :utc_datetime_usec) 36 | 37 | field(:deleted_at, :utc_datetime_usec) 38 | 39 | field(:unit_based, :boolean, default: false) 40 | 41 | belongs_to(:creator, ValueFlows.Util.user_schema()) 42 | 43 | belongs_to(:context, Needle.Pointer) 44 | 45 | belongs_to(:eligible_location, Bonfire.Geolocate.Geolocation) 46 | 47 | has_many(:publishes, ProposedIntent) 48 | many_to_many(:publishes_intents, Intent, join_through: ProposedIntent) 49 | 50 | many_to_many(:proposed_to, Needle.Pointer, join_through: ProposedTo) 51 | 52 | timestamps(inserted_at: false) 53 | end 54 | 55 | @required ~w(name is_public)a 56 | @cast @required ++ 57 | ~w(note has_beginning has_end unit_based eligible_location_id context_id)a 58 | 59 | def create_changeset( 60 | %{} = creator, 61 | attrs 62 | ) do 63 | %Proposal{} 64 | |> Changeset.cast(attrs, @cast) 65 | |> Changeset.change( 66 | created: DateTime.utc_now(), 67 | creator_id: creator.id, 68 | is_public: true 69 | ) 70 | |> Changeset.validate_required(@required) 71 | |> common_changeset() 72 | end 73 | 74 | def update_changeset(%Proposal{} = proposal, attrs) do 75 | proposal 76 | |> Changeset.cast(attrs, @cast) 77 | |> common_changeset() 78 | end 79 | 80 | defp common_changeset(changeset) do 81 | changeset 82 | |> change_public() 83 | |> Changeset.foreign_key_constraint( 84 | :eligible_location, 85 | name: :vf_proposal_eligible_location_id_fkey 86 | ) 87 | end 88 | 89 | @behaviour Bonfire.Common.SchemaModule 90 | def context_module, do: ValueFlows.Proposal.Proposals 91 | def query_module, do: ValueFlows.Proposal.Queries 92 | 93 | def follow_filters, do: [:default] 94 | end 95 | -------------------------------------------------------------------------------- /lib/proposal/proposal_queries.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Proposal.Queries do 3 | alias ValueFlows.Proposal 4 | 5 | # alias ValueFlows.Proposal.Proposals 6 | 7 | import Bonfire.Common.Repo.Utils, only: [match_admin: 0] 8 | import Ecto.Query 9 | import Geo.PostGIS 10 | import Untangle 11 | 12 | def query(Proposal) do 13 | from(c in Proposal, as: :proposal) 14 | end 15 | 16 | def query(:count) do 17 | from(c in Proposal, as: :proposal) 18 | end 19 | 20 | def query(filters), do: query(Proposal, filters) 21 | 22 | def query(q, filters), do: filter(query(q), filters) 23 | 24 | def queries(query, _page_opts, base_filters, data_filters, count_filters) do 25 | base_q = query(query, base_filters) 26 | data_q = filter(base_q, data_filters) 27 | count_q = filter(base_q, count_filters) 28 | {data_q, count_q} 29 | end 30 | 31 | def join_to(q, spec, join_qualifier \\ :left) 32 | 33 | def join_to(q, specs, jq) when is_list(specs) do 34 | Enum.reduce(specs, q, &join_to(&2, &1, jq)) 35 | end 36 | 37 | def join_to(q, :context, jq) do 38 | join(q, jq, [proposal: c], c2 in assoc(c, :context), as: :context) 39 | end 40 | 41 | def join_to(q, :geolocation, jq) do 42 | join(q, jq, [proposal: c], g in assoc(c, :eligible_location), as: :geolocation) 43 | end 44 | 45 | ### filter/2 46 | ## by many 47 | 48 | def filter(q, filters) when is_list(filters) do 49 | Enum.reduce(filters, q, &filter(&2, &1)) 50 | end 51 | 52 | ## by preset 53 | 54 | def filter(q, :default) do 55 | filter(q, [:deleted]) 56 | 57 | # filter q, [:deleted, {:preload, 
:provider}, {:preload, :receiver}] 58 | end 59 | 60 | ## by join 61 | 62 | def filter(q, {:join, {join, qual}}), do: join_to(q, join, qual) 63 | def filter(q, {:join, join}), do: join_to(q, join) 64 | 65 | ## by user 66 | 67 | def filter(q, {:user, match_admin()}), do: q 68 | 69 | def filter(q, {:user, nil}) do 70 | filter(q, ~w(disabled private)a) 71 | end 72 | 73 | def filter(q, {:user, %{id: user_id}}) do 74 | q 75 | |> where( 76 | [proposal: c], 77 | not is_nil(c.published_at) or c.creator_id == ^user_id 78 | ) 79 | |> filter(~w(disabled)a) 80 | end 81 | 82 | ## by status 83 | 84 | def filter(q, :deleted) do 85 | where(q, [proposal: c], is_nil(c.deleted_at)) 86 | end 87 | 88 | def filter(q, :disabled) do 89 | where(q, [proposal: c], is_nil(c.disabled_at)) 90 | end 91 | 92 | def filter(q, :private) do 93 | where(q, [proposal: c], not is_nil(c.published_at)) 94 | end 95 | 96 | ## by field values 97 | 98 | def filter(q, {:id, id}) when is_binary(id) do 99 | where(q, [proposal: c], c.id == ^id) 100 | end 101 | 102 | def filter(q, {:id, ids}) when is_list(ids) do 103 | where(q, [proposal: c], c.id in ^ids) 104 | end 105 | 106 | def filter(q, {:context_id, id}) when is_binary(id) do 107 | where(q, [proposal: c], c.context_id == ^id) 108 | end 109 | 110 | def filter(q, {:context_id, ids}) when is_list(ids) do 111 | where(q, [proposal: c], c.context_id in ^ids) 112 | end 113 | 114 | def filter(q, {:agent_id, id}) when is_binary(id) do 115 | where(q, [proposal: c], c.creator_id == ^id) 116 | end 117 | 118 | def filter(q, {:agent_id, ids}) when is_list(ids) do 119 | where(q, [proposal: c], c.creator_id in ^ids) 120 | end 121 | 122 | def filter(q, {:eligible_location_id, eligible_location_id}) 123 | when is_binary(eligible_location_id) do 124 | where(q, [proposal: c], c.eligible_location_id == ^eligible_location_id) 125 | end 126 | 127 | def filter(q, {:eligible_location_id, eligible_location_id}) 128 | when is_list(eligible_location_id) do 129 | where(q, [proposal: c], c.eligible_location_id in ^eligible_location_id) 130 | end 131 | 132 | def filter(q, {:near_point, geom_point, :distance_meters, meters}) do 133 | q 134 | |> join_to(:geolocation) 135 | |> where( 136 | [proposal: c, geolocation: g], 137 | st_dwithin_in_meters(g.geom, ^geom_point, ^meters) 138 | ) 139 | end 140 | 141 | ## by ordering 142 | 143 | def filter(q, {:order, :id}) do 144 | filter(q, order: [desc: :id]) 145 | end 146 | 147 | def filter(q, {:order, [desc: :id]}) do 148 | order_by(q, [proposal: c, id: id], 149 | desc: coalesce(id.count, 0), 150 | desc: c.id 151 | ) 152 | end 153 | 154 | # grouping and counting 155 | 156 | def filter(q, {:group_count, key}) when is_atom(key) do 157 | filter(q, group: key, count: key) 158 | end 159 | 160 | def filter(q, {:group, key}) when is_atom(key) do 161 | group_by(q, [proposal: c], field(c, ^key)) 162 | end 163 | 164 | def filter(q, {:count, key}) when is_atom(key) do 165 | select(q, [proposal: c], {field(c, ^key), count(c.id)}) 166 | end 167 | 168 | def filter(q, other_filter), 169 | do: ValueFlows.Util.common_filters(q, other_filter) 170 | end 171 | -------------------------------------------------------------------------------- /lib/proposal/proposed_intent.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal.ProposedIntent do 2 | # use Bonfire.Common.Repo.Schema 3 | use Needle.Pointable, 4 | otp_app: :bonfire_valueflows, 5 | source: "vf_proposed_intent", 6 | table_id: "6VB11SHEDPR0P0SED1NTENT10N" 7 | 8 | alias 
Ecto.Changeset 9 | alias ValueFlows.Proposal 10 | alias ValueFlows.Planning.Intent 11 | 12 | # @type t :: %__MODULE__{} 13 | 14 | # table_schema "vf_proposed_intent" do 15 | pointable_schema do 16 | # Is this a reciprocal intent of this proposal? rather than primary 17 | # Not meant to be used for intent matching. 18 | # Note: allows null 19 | field(:reciprocal, :boolean) 20 | field(:deleted_at, :utc_datetime_usec) 21 | 22 | # The intent which is part of this published proposal. 23 | belongs_to(:publishes, Intent) 24 | 25 | # The published proposal which this intent is part of. 26 | belongs_to(:published_in, Proposal) 27 | end 28 | 29 | @cast ~w(reciprocal)a 30 | 31 | def changeset(%Proposal{} = published_in, %Intent{} = publishes, %{} = attrs) do 32 | %__MODULE__{} 33 | |> Changeset.cast(attrs, @cast) 34 | |> Changeset.change( 35 | published_in_id: published_in.id, 36 | publishes_id: publishes.id 37 | ) 38 | end 39 | end 40 | -------------------------------------------------------------------------------- /lib/proposal/proposed_intent_graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled and 3 | Code.ensure_loaded?(Absinthe.Schema.Notation) do 4 | defmodule ValueFlows.Proposal.ProposedIntentGraphQL do 5 | use Absinthe.Schema.Notation 6 | 7 | alias Bonfire.API.GraphQL 8 | 9 | alias Bonfire.API.GraphQL 10 | 11 | alias Bonfire.API.GraphQL.ResolveField 12 | 13 | alias ValueFlows.Proposal.Proposals 14 | 15 | # alias ValueFlows.Proposal.ProposedIntent 16 | 17 | def proposed_intent(%{id: id}, info) do 18 | ResolveField.run(%ResolveField{ 19 | module: __MODULE__, 20 | fetcher: :fetch_proposed_intent, 21 | context: id, 22 | info: info 23 | }) 24 | end 25 | 26 | # FIXME ADD BATCHING, THIS IS NESTED DATA!!!!1!!!one!!! 
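    # A minimal, hypothetical sketch of one way the FIXME above could be addressed, using
    # Absinthe's batch/3 helper. Assumptions not present in this file: `import Absinthe.Resolution.Helpers`
    # would need to be added, the parent struct is assumed to already carry `publishes_id`, and
    # `Intents.many/1` is assumed to return `{:ok, list}` as the other context modules here do;
    # `batch_intents_by_ids/2` is an illustrative name, not an existing function in this repo.
    #
    #   def intent_in_proposal_edge(%{publishes_id: intent_id}, _, _info) do
    #     batch({__MODULE__, :batch_intents_by_ids}, intent_id, fn intents_by_id ->
    #       {:ok, Map.get(intents_by_id, intent_id)}
    #     end)
    #   end
    #
    #   # Called once per batch: loads every requested intent in a single query and
    #   # returns a map keyed by intent id, as batch/3 expects.
    #   def batch_intents_by_ids(_, intent_ids) do
    #     {:ok, intents} = ValueFlows.Planning.Intent.Intents.many(id: Enum.uniq(intent_ids))
    #     Map.new(intents, &{&1.id, &1})
    #   end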
27 | 28 | def intent_in_proposal_edge(%{id: proposed_intent_id}, _, info) do 29 | with {:ok, proposed_intent} <- 30 | ValueFlows.Proposal.ProposedIntents.one([ 31 | :default, 32 | id: proposed_intent_id 33 | ]) do 34 | ValueFlows.Planning.Intent.GraphQL.intent( 35 | %{id: proposed_intent.publishes_id}, 36 | info 37 | ) 38 | end 39 | end 40 | 41 | def proposal_in_intent_edge(%{id: proposed_intent_id}, _, info) do 42 | with {:ok, proposed_intent} <- 43 | ValueFlows.Proposal.ProposedIntents.one([ 44 | :default, 45 | id: proposed_intent_id 46 | ]) do 47 | ValueFlows.Proposal.GraphQL.proposal( 48 | %{id: proposed_intent.published_in_id}, 49 | info 50 | ) 51 | end 52 | end 53 | 54 | def publishes_edge(%{id: proposal_id}, _, _info) do 55 | ValueFlows.Proposal.ProposedIntents.many([ 56 | :default, 57 | published_in_id: proposal_id 58 | ]) 59 | end 60 | 61 | def publishes_edge(_, _, _info) do 62 | {:ok, nil} 63 | end 64 | 65 | def published_in_edge(%{id: intent_id}, _, _info) do 66 | ValueFlows.Proposal.ProposedIntents.many([ 67 | :default, 68 | publishes_id: intent_id 69 | ]) 70 | end 71 | 72 | def fetch_proposed_intent(_info, id) do 73 | ValueFlows.Proposal.ProposedIntents.one([:default, id: id]) 74 | end 75 | 76 | def propose_intent( 77 | %{ 78 | published_in: published_in_proposal_id, 79 | publishes: publishes_intent_id 80 | } = params, 81 | info 82 | ) do 83 | with :ok <- GraphQL.is_authenticated(info), 84 | {:ok, published_in} <- 85 | ValueFlows.Proposal.GraphQL.proposal( 86 | %{id: published_in_proposal_id}, 87 | info 88 | ), 89 | {:ok, publishes} <- 90 | ValueFlows.Planning.Intent.GraphQL.intent( 91 | %{id: publishes_intent_id}, 92 | info 93 | ), 94 | {:ok, proposed_intent} <- 95 | ValueFlows.Proposal.ProposedIntents.propose_intent( 96 | published_in, 97 | publishes, 98 | params 99 | ) do 100 | {:ok, 101 | %{ 102 | proposed_intent: %{ 103 | proposed_intent 104 | | published_in: published_in, 105 | publishes: publishes 106 | } 107 | }} 108 | end 109 | end 110 | 111 | def delete_proposed_intent(%{id: id}, info) do 112 | with :ok <- GraphQL.is_authenticated(info), 113 | {:ok, proposed_intent} <- proposed_intent(%{id: id}, info), 114 | {:ok, _} <- 115 | ValueFlows.Proposal.ProposedIntents.delete(proposed_intent) do 116 | {:ok, true} 117 | end 118 | end 119 | end 120 | end 121 | -------------------------------------------------------------------------------- /lib/proposal/proposed_intent_queries.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal.ProposedIntentQueries do 2 | import Ecto.Query 3 | import Untangle 4 | 5 | alias ValueFlows.Proposal.ProposedIntent 6 | 7 | def query(ProposedIntent) do 8 | from(pi in ProposedIntent, as: :proposed_intent) 9 | end 10 | 11 | def query(filters), do: query(ProposedIntent, filters) 12 | 13 | def query(q, filters), do: filter(query(q), filters) 14 | 15 | def join_to(q, spec, join_qualifier \\ :left) 16 | 17 | def join_to(q, specs, jq) when is_list(specs) do 18 | Enum.reduce(specs, q, &join_to(&2, &1, jq)) 19 | end 20 | 21 | def join_to(q, :publishes, jq) do 22 | join(q, jq, [proposed_intent: pi], i in assoc(pi, :publishes), as: :publishes) 23 | end 24 | 25 | def join_to(q, :published_in, jq) do 26 | join(q, jq, [proposed_intent: pi], p in assoc(pi, :published_in), as: :published_in) 27 | end 28 | 29 | def filter(q, filters) when is_list(filters) do 30 | Enum.reduce(filters, q, &filter(&2, &1)) 31 | end 32 | 33 | ## joins 34 | def filter(q, {:join, {join, qual}}), do: join_to(q, join, qual) 35 
| def filter(q, {:join, join}), do: join_to(q, join) 36 | 37 | # by preset 38 | 39 | def filter(q, :default) do 40 | filter(q, [:deleted]) 41 | end 42 | 43 | def filter(q, :deleted) do 44 | where(q, [proposed_intent: pi], is_nil(pi.deleted_at)) 45 | end 46 | 47 | # by field values 48 | 49 | def filter(q, {:id, id}) when is_binary(id) do 50 | where(q, [proposed_intent: pi], pi.id == ^id) 51 | end 52 | 53 | def filter(q, {:id, ids}) when is_list(ids) do 54 | where(q, [proposed_intent: pi], pi.id in ^ids) 55 | end 56 | 57 | def filter(q, {:publishes_id, id}) when is_binary(id) do 58 | where(q, [proposed_intent: pi], pi.publishes_id == ^id) 59 | end 60 | 61 | def filter(q, {:publishes_id, ids}) when is_list(ids) do 62 | where(q, [proposed_intent: pi], pi.publishes_id in ^ids) 63 | end 64 | 65 | def filter(q, {:published_in_id, id}) when is_binary(id) do 66 | where(q, [proposed_intent: pi], pi.published_in_id == ^id) 67 | end 68 | 69 | def filter(q, {:published_in_id, ids}) when is_list(ids) do 70 | where(q, [proposed_intent: pi], pi.published_in_id in ^ids) 71 | end 72 | 73 | # grouping 74 | 75 | def filter(q, {:group_count, key}) when is_atom(key) do 76 | filter(q, group: key, count: key) 77 | end 78 | 79 | def filter(q, {:group, key}) when is_atom(key) do 80 | group_by(q, [proposed_intent: c], field(c, ^key)) 81 | end 82 | 83 | def filter(q, {:count, key}) when is_atom(key) do 84 | select(q, [proposed_intent: c], {field(c, ^key), count(c.id)}) 85 | end 86 | 87 | def filter(q, other_filter), 88 | do: ValueFlows.Util.common_filters(q, other_filter) 89 | end 90 | -------------------------------------------------------------------------------- /lib/proposal/proposed_intents.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Proposal.ProposedIntents do 3 | use Bonfire.Common.Utils, only: [maybe: 2] 4 | 5 | import Bonfire.Common.Config, only: [repo: 0] 6 | # alias Bonfire.API.GraphQL 7 | alias Bonfire.API.GraphQL.Fields 8 | alias Bonfire.API.GraphQL.Page 9 | 10 | alias ValueFlows.Proposal 11 | alias ValueFlows.Proposal 12 | 13 | alias ValueFlows.Proposal.ProposedTo 14 | alias ValueFlows.Proposal.ProposedToQueries 15 | alias ValueFlows.Proposal.ProposedIntentQueries 16 | alias ValueFlows.Proposal.ProposedIntent 17 | alias ValueFlows.Proposal.Queries 18 | 19 | alias ValueFlows.Planning.Intent 20 | 21 | @behaviour Bonfire.Federate.ActivityPub.FederationModules 22 | def federation_module, do: ["ValueFlows:ProposedIntent", "ProposedIntent"] 23 | 24 | @spec one(filters :: [any]) :: {:ok, ProposedIntent.t()} | {:error, term} 25 | def one(filters), 26 | do: repo().single(ProposedIntentQueries.query(ProposedIntent, filters)) 27 | 28 | @spec many(filters :: [any]) :: {:ok, [ProposedIntent.t()]} | {:error, term} 29 | def many(filters \\ []), 30 | do: {:ok, repo().many(ProposedIntentQueries.query(ProposedIntent, filters))} 31 | 32 | @spec propose_intent(Proposal.t(), Intent.t(), map) :: 33 | {:ok, ProposedIntent.t()} | {:error, term} 34 | def propose_intent(%Proposal{} = proposal, %Intent{} = intent, attrs) do 35 | with {:ok, proposed_intent} <- 36 | repo().insert(ProposedIntent.changeset(proposal, intent, attrs)) do 37 | {:ok, 38 | proposed_intent 39 | |> Map.put(:publishes, intent) 40 | |> Map.put(:published_in, proposal)} 41 | end 42 | end 43 | 44 | def create(_creator, %{published_in: proposal, publishes: intent} = attrs) do 45 | propose_intent(proposal, intent, attrs) 46 | end 47 | 48 | @spec 
delete(ProposedIntent.t()) :: {:ok, ProposedIntent.t()} | {:error, term} 49 | def delete(%ProposedIntent{} = proposed_intent) do 50 | Bonfire.Common.Repo.Delete.soft_delete(proposed_intent) 51 | end 52 | 53 | def ap_publish_activity(subject, activity_name, thing) do 54 | ValueFlows.Util.Federation.ap_publish_activity( 55 | subject, 56 | activity_name, 57 | :proposal, 58 | thing, 59 | 4, 60 | [ 61 | :published_in 62 | ] 63 | ) 64 | end 65 | 66 | def ap_receive_activity(creator, activity, object) do 67 | IO.inspect(object, label: "ap_receive_activity - handle ProposedIntent") 68 | 69 | ValueFlows.Util.Federation.ap_receive_activity( 70 | creator, 71 | activity, 72 | object, 73 | &create/2 74 | ) 75 | end 76 | end 77 | -------------------------------------------------------------------------------- /lib/proposal/proposed_to.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal.ProposedTo do 2 | # use Bonfire.Common.Repo.Schema 3 | use Needle.Pointable, 4 | otp_app: :bonfire_valueflows, 5 | source: "vf_proposed_to", 6 | table_id: "6R0P0SA1HASBEENADDRESSEDT0" 7 | 8 | alias Ecto.Changeset 9 | alias ValueFlows.Proposal 10 | 11 | # @type t :: %__MODULE__{} 12 | 13 | # table_schema "vf_proposed_to" do 14 | pointable_schema do 15 | field(:deleted_at, :utc_datetime_usec) 16 | belongs_to(:proposed_to, Pointer) 17 | belongs_to(:proposed, Proposal) 18 | end 19 | 20 | def changeset(%{id: _} = proposed_to, %Proposal{} = proposed) do 21 | %__MODULE__{} 22 | |> Changeset.cast(%{}, []) 23 | |> Changeset.change( 24 | proposed_to_id: proposed_to.id, 25 | proposed_id: proposed.id 26 | ) 27 | end 28 | end 29 | -------------------------------------------------------------------------------- /lib/proposal/proposed_to_graphql.ex: -------------------------------------------------------------------------------- 1 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled and 2 | Code.ensure_loaded?(Absinthe.Schema.Notation) do 3 | defmodule ValueFlows.Proposal.ProposedToGraphQL do 4 | use Absinthe.Schema.Notation 5 | 6 | alias Bonfire.API.GraphQL 7 | alias Bonfire.Common.Needles 8 | alias ValueFlows.Proposal.Proposals 9 | 10 | alias Bonfire.API.GraphQL.ResolveField 11 | 12 | import Bonfire.Common.Config, only: [repo: 0] 13 | 14 | def proposed_to(%{id: id}, info) do 15 | ResolveField.run(%ResolveField{ 16 | module: __MODULE__, 17 | fetcher: :fetch_proposed_to, 18 | context: id, 19 | info: info 20 | }) 21 | end 22 | 23 | def fetch_proposed_to(_info, id) do 24 | ValueFlows.Proposal.ProposedTos.one([:default, id: id]) 25 | end 26 | 27 | def published_to_edge(%{id: id}, _, _info) when not is_nil(id) do 28 | ValueFlows.Proposal.ProposedTos.many([:default, proposed_id: id]) 29 | end 30 | 31 | def published_to_edge(_, _, _info) do 32 | {:ok, nil} 33 | end 34 | 35 | def proposed_to_agent(%{proposed_to_id: id}, _, _info) 36 | when not is_nil(id) do 37 | {:ok, ValueFlows.Agent.Agents.agent(id, nil)} 38 | end 39 | 40 | def proposed_to_agent(_, _, _info) do 41 | {:ok, nil} 42 | end 43 | 44 | def fetch_proposed_edge(%{proposed_id: id} = thing, _, _) 45 | when is_binary(id) do 46 | thing = repo().preload(thing, :proposed) 47 | {:ok, Map.get(thing, :proposed)} 48 | end 49 | 50 | def fetch_proposed_edge(_, _, _) do 51 | {:ok, nil} 52 | end 53 | 54 | def propose_to(%{proposed_to: agent_id, proposed: proposed_id}, info) do 55 | with :ok <- GraphQL.is_authenticated(info), 56 | {:ok, agent} <- Needles.get(agent_id), 57 | {:ok, proposed} <- 58 | 
ValueFlows.Proposal.GraphQL.proposal(%{id: proposed_id}, info), 59 | {:ok, proposed_to} <- 60 | ValueFlows.Proposal.ProposedTos.propose_to(agent, proposed) do 61 | {:ok, %{proposed_to: %{proposed_to | proposed_to: agent, proposed: proposed}}} 62 | end 63 | end 64 | 65 | def delete_proposed_to(%{id: id}, info) do 66 | with :ok <- GraphQL.is_authenticated(info), 67 | {:ok, proposed_to} <- proposed_to(%{id: id}, info), 68 | {:ok, _} <- ValueFlows.Proposal.ProposedTos.delete(proposed_to) do 69 | {:ok, true} 70 | end 71 | end 72 | 73 | # def validate_context(pointer) do 74 | # if Needle.Pointers.table!(pointer).schema in valid_contexts() do 75 | # :ok 76 | # else 77 | # GraphQL.not_permitted("agent") 78 | # end 79 | # end 80 | 81 | # def valid_contexts do 82 | # Bonfire.Common.Config.get_ext(:bonfire_valueflows, :valid_agent_schemas, [ValueFlows.Util.user_schema()]) 83 | # end 84 | end 85 | end 86 | -------------------------------------------------------------------------------- /lib/proposal/proposed_to_queries.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal.ProposedToQueries do 2 | import Ecto.Query 3 | import Untangle 4 | 5 | alias ValueFlows.Proposal.ProposedTo 6 | 7 | def query(ProposedTo) do 8 | from(pt in ProposedTo, as: :proposed_to) 9 | end 10 | 11 | def query(filters), do: query(ProposedTo, filters) 12 | 13 | def query(q, filters), do: filter(query(q), filters) 14 | 15 | def join_to(q, spec, join_qualifier \\ :left) 16 | 17 | def join_to(q, specs, jq) when is_list(specs) do 18 | Enum.reduce(specs, q, &join_to(&2, &1, jq)) 19 | end 20 | 21 | def filter(q, filters) when is_list(filters) do 22 | Enum.reduce(filters, q, &filter(&2, &1)) 23 | end 24 | 25 | ## joins 26 | def filter(q, {:join, {join, qual}}), do: join_to(q, join, qual) 27 | def filter(q, {:join, join}), do: join_to(q, join) 28 | 29 | # by preset 30 | 31 | def filter(q, :default) do 32 | filter(q, [:deleted]) 33 | end 34 | 35 | def filter(q, :deleted) do 36 | where(q, [proposed_to: pt], is_nil(pt.deleted_at)) 37 | end 38 | 39 | # by field values 40 | 41 | def filter(q, {:id, id}) when is_binary(id) do 42 | where(q, [proposed_to: pt], pt.id == ^id) 43 | end 44 | 45 | def filter(q, {:id, ids}) when is_list(ids) do 46 | where(q, [proposed_to: pt], pt.id in ^ids) 47 | end 48 | 49 | def filter(q, {:proposed_to_id, id}) when is_binary(id) do 50 | where(q, [proposed_to: pt], pt.proposed_to_id == ^id) 51 | end 52 | 53 | def filter(q, {:proposed_to_id, ids}) when is_list(ids) do 54 | where(q, [proposed_to: pt], pt.proposed_to_id in ^ids) 55 | end 56 | 57 | def filter(q, {:proposed_id, id}) when is_binary(id) do 58 | where(q, [proposed_to: pt], pt.proposed_id == ^id) 59 | end 60 | 61 | def filter(q, {:proposed_id, ids}) when is_list(ids) do 62 | where(q, [proposed_to: pt], pt.proposed_id in ^ids) 63 | end 64 | 65 | # grouping 66 | 67 | def filter(q, {:group_count, key}) when is_atom(key) do 68 | filter(q, group: key, count: key) 69 | end 70 | 71 | def filter(q, {:group, key}) when is_atom(key) do 72 | group_by(q, [proposed_to: pt], field(pt, ^key)) 73 | end 74 | 75 | def filter(q, {:count, key}) when is_atom(key) do 76 | select(q, [proposed_to: pt], {field(pt, ^key), count(pt.id)}) 77 | end 78 | 79 | def filter(q, other_filter), 80 | do: ValueFlows.Util.common_filters(q, other_filter) 81 | end 82 | -------------------------------------------------------------------------------- /lib/proposal/proposed_tos.ex: 
-------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Proposal.ProposedTos do 3 | use Bonfire.Common.Utils, only: [maybe: 2] 4 | 5 | import Bonfire.Common.Config, only: [repo: 0] 6 | # alias Bonfire.API.GraphQL 7 | alias Bonfire.API.GraphQL.Fields 8 | alias Bonfire.API.GraphQL.Page 9 | 10 | alias ValueFlows.Proposal 11 | alias ValueFlows.Proposal 12 | 13 | alias ValueFlows.Proposal.ProposedTo 14 | alias ValueFlows.Proposal.ProposedToQueries 15 | alias ValueFlows.Proposal.ProposedIntentQueries 16 | alias ValueFlows.Proposal.ProposedIntent 17 | alias ValueFlows.Proposal.Queries 18 | 19 | alias ValueFlows.Planning.Intent 20 | 21 | @behaviour Bonfire.Federate.ActivityPub.FederationModules 22 | def federation_module, do: ["ValueFlows:ProposedTo", "ProposedTo"] 23 | 24 | @spec one(filters :: [any]) :: {:ok, ProposedTo.t()} | {:error, term} 25 | def one(filters), 26 | do: repo().single(ProposedToQueries.query(ProposedTo, filters)) 27 | 28 | @spec many(filters :: [any]) :: {:ok, [ProposedTo]} | {:error, term} 29 | def many(filters \\ []), 30 | do: {:ok, repo().many(ProposedToQueries.query(ProposedTo, filters))} 31 | 32 | # if you like it then you should put a ring on it 33 | @spec propose_to(any, Proposal.t()) :: {:ok, ProposedTo.t()} | {:error, term} 34 | def propose_to(proposed_to, %Proposal{} = proposed) do 35 | repo().insert(ProposedTo.changeset(proposed_to, proposed)) 36 | end 37 | 38 | @spec delete(ProposedTo.t()) :: {:ok, ProposedTo.t()} | {:error, term} 39 | def delete(proposed_to), 40 | do: Bonfire.Common.Repo.Delete.soft_delete(proposed_to) 41 | 42 | def ap_publish_activity(subject, activity_name, thing) do 43 | ValueFlows.Util.Federation.ap_publish_activity( 44 | subject, 45 | activity_name, 46 | :proposed_to, 47 | thing, 48 | 4, 49 | [ 50 | :published_in 51 | ] 52 | ) 53 | end 54 | 55 | def ap_receive_activity(creator, activity, object) do 56 | IO.inspect(object, label: "ap_receive_activity - handle ProposedTo") 57 | # TODO 58 | # ValueFlows.Util.Federation.ap_receive_activity(creator, activity, object, &create/2) 59 | nil 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /lib/recipe/graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled do 3 | defmodule ValueFlows.Recipe.GraphQL do 4 | import Untangle 5 | 6 | # use Absinthe.Schema.Notation 7 | # import_sdl path: "lib/value_flows/graphql/schemas/recipe.gql" 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/scenario/graphql.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Application.compile_env(:bonfire_api_graphql, :modularity) != :disabled do 3 | defmodule ValueFlows.Scenario.GraphQL do 4 | import Untangle 5 | 6 | # use Absinthe.Schema.Notation 7 | # import_sdl path: "lib/value_flows/graphql/schemas/scenario.gql" 8 | end 9 | end 10 | -------------------------------------------------------------------------------- /lib/schemas: -------------------------------------------------------------------------------- 1 | ../../../../vf-graphql-temp-fork/lib/schemas/ -------------------------------------------------------------------------------- /lib/value_calculation/graphql.ex: 
-------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | if Code.ensure_loaded?(Bonfire.API.GraphQL) do 3 | defmodule ValueFlows.ValueCalculation.GraphQL do 4 | import Bonfire.Common.Config, only: [repo: 0] 5 | 6 | alias Bonfire.API.GraphQL 7 | alias Bonfire.API.GraphQL.FetchPage 8 | alias Bonfire.API.GraphQL.ResolveField 9 | alias Bonfire.API.GraphQL.ResolveRootPage 10 | 11 | alias ValueFlows.ValueCalculation.ValueCalculations 12 | 13 | def value_calculation(%{id: id}, info) do 14 | ResolveField.run(%ResolveField{ 15 | module: __MODULE__, 16 | fetcher: :fetch_value_calculation, 17 | context: id, 18 | info: info 19 | }) 20 | end 21 | 22 | def value_calculations(page_opts, info) do 23 | ResolveRootPage.run(%ResolveRootPage{ 24 | module: __MODULE__, 25 | fetcher: :fetch_value_calculations, 26 | page_opts: page_opts, 27 | info: info, 28 | cursor_validators: [ 29 | &(is_integer(&1) and &1 >= 0), 30 | &Needle.UID.cast/1 31 | ] 32 | }) 33 | end 34 | 35 | def fetch_value_calculation(info, id) do 36 | ValueCalculations.one([ 37 | :default, 38 | id: id, 39 | creator: GraphQL.current_user(info) 40 | ]) 41 | end 42 | 43 | def fetch_value_calculations(page_opts, info) do 44 | filters = info |> Map.get(:data_filters, %{}) |> Keyword.new() 45 | 46 | FetchPage.run(%FetchPage{ 47 | queries: ValueFlows.ValueCalculation.Queries, 48 | query: ValueFlows.ValueCalculation, 49 | page_opts: page_opts, 50 | cursor_fn: & &1.id, 51 | base_filters: [ 52 | :default, 53 | creator: GraphQL.current_user(info) 54 | ], 55 | data_filters: [filters ++ [paginate_id: page_opts]] 56 | }) 57 | end 58 | 59 | def value_action_edge(thing, opts, info) do 60 | thing 61 | |> Bonfire.Common.Enums.map_key_replace(:value_action_id, :action_id) 62 | |> ValueFlows.Knowledge.Action.GraphQL.action_edge(opts, info) 63 | end 64 | 65 | def value_unit_edge(%{value_unit_id: id} = thing, _, _) 66 | when is_binary(id) do 67 | thing = repo().preload(thing, :value_unit) 68 | {:ok, Map.get(thing, :value_unit)} 69 | end 70 | 71 | def value_unit_edge(_, _, _) do 72 | {:ok, nil} 73 | end 74 | 75 | def resource_conforms_to_edge(%{resource_conforms_to_id: id} = thing, _, _) 76 | when is_binary(id) do 77 | thing = repo().preload(thing, :resource_conforms_to) 78 | {:ok, Map.get(thing, :resource_conforms_to)} 79 | end 80 | 81 | def resource_conforms_to_edge(_, _, _) do 82 | {:ok, nil} 83 | end 84 | 85 | def value_resource_conforms_to_edge( 86 | %{value_resource_conforms_to_id: id} = thing, 87 | _, 88 | _ 89 | ) 90 | when is_binary(id) do 91 | thing = repo().preload(thing, :value_resource_conforms_to) 92 | {:ok, Map.get(thing, :value_resource_conforms_to)} 93 | end 94 | 95 | def value_resource_conforms_to_edge(_, _, _) do 96 | {:ok, nil} 97 | end 98 | 99 | def create_value_calculation(%{value_calculation: attrs}, info) do 100 | with {:ok, user} <- GraphQL.current_user_or_not_logged_in(info), 101 | {:ok, value_calculation} <- ValueCalculations.create(user, attrs) do 102 | {:ok, %{value_calculation: value_calculation}} 103 | end 104 | end 105 | 106 | def update_value_calculation(%{value_calculation: %{id: id} = attrs}, info) do 107 | with :ok <- GraphQL.is_authenticated(info), 108 | {:ok, value_calculation} <- value_calculation(%{id: id}, info), 109 | {:ok, value_calculation} <- 110 | ValueCalculations.update(value_calculation, attrs) do 111 | {:ok, %{value_calculation: value_calculation}} 112 | end 113 | end 114 | 115 | def delete_value_calculation(%{id: id}, info) do 116 | with :ok <- 
GraphQL.is_authenticated(info), 117 | {:ok, value_calculation} <- value_calculation(%{id: id}, info), 118 | {:ok, _} <- ValueCalculations.soft_delete(value_calculation) do 119 | {:ok, true} 120 | end 121 | end 122 | end 123 | end 124 | -------------------------------------------------------------------------------- /lib/value_calculation/migrations.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.ValueCalculation.Migrations do 3 | @moduledoc false 4 | use Ecto.Migration 5 | 6 | import Needle.Migration 7 | 8 | def up do 9 | create_pointable_table(ValueFlows.ValueCalculation) do 10 | add(:name, :text, null: true) 11 | add(:note, :text, null: true) 12 | add(:formula, :text, null: false) 13 | 14 | add(:creator_id, weak_pointer(ValueFlows.Util.user_schema()), null: true) 15 | add(:context_id, weak_pointer(), null: true) 16 | add(:value_unit_id, weak_pointer(Bonfire.Quantify.Unit), null: false) 17 | 18 | add(:action_id, :string, null: false) 19 | add(:value_action_id, :string, null: false) 20 | 21 | add( 22 | :resource_conforms_to_id, 23 | weak_pointer(ValueFlows.Knowledge.ResourceSpecification) 24 | ) 25 | 26 | add( 27 | :value_resource_conforms_to_id, 28 | weak_pointer(ValueFlows.Knowledge.ResourceSpecification) 29 | ) 30 | 31 | add(:deleted_at, :timestamptz) 32 | 33 | timestamps(inserted_at: false, type: :utc_datetime_usec) 34 | end 35 | end 36 | 37 | def down do 38 | drop_pointable_table(ValueFlows.ValueCalculation) 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /lib/value_calculation/value_calculation.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.ValueCalculation do 3 | use Needle.Pointable, 4 | otp_app: :bonfire_valueflows, 5 | source: "vf_value_calculation", 6 | table_id: "3A1VEF10WSVA1VECA1CV1AT10N" 7 | 8 | alias Ecto.Changeset 9 | 10 | # @type t :: %__MODULE__{} 11 | 12 | pointable_schema do 13 | field(:name, :string) 14 | field(:note, :string) 15 | field(:formula, :string) 16 | field(:resource_classified_as, {:array, :string}, virtual: true) 17 | 18 | belongs_to(:creator, ValueFlows.Util.user_schema()) 19 | belongs_to(:context, Needle.Pointer) 20 | belongs_to(:value_unit, Bonfire.Quantify.Unit) 21 | belongs_to(:action, ValueFlows.Actions.Action, type: :string) 22 | belongs_to(:value_action, ValueFlows.Actions.Action, type: :string) 23 | 24 | belongs_to( 25 | :resource_conforms_to, 26 | ValueFlows.Knowledge.ResourceSpecification 27 | ) 28 | 29 | belongs_to( 30 | :value_resource_conforms_to, 31 | ValueFlows.Knowledge.ResourceSpecification 32 | ) 33 | 34 | field(:deleted_at, :utc_datetime_usec) 35 | 36 | timestamps(inserted_at: false) 37 | end 38 | 39 | @required ~w(formula action_id value_action_id value_unit_id)a 40 | @cast @required ++ 41 | ~w(name note context_id resource_conforms_to_id value_resource_conforms_to_id)a 42 | 43 | def create_changeset(%{} = creator, %{} = attrs) do 44 | %__MODULE__{} 45 | |> Changeset.cast(attrs, @cast) 46 | |> Changeset.change(creator_id: creator.id) 47 | |> Changeset.validate_required(@required) 48 | end 49 | 50 | def update_changeset(%__MODULE__{} = calculation, %{} = attrs) do 51 | Changeset.cast(calculation, attrs, @cast) 52 | end 53 | 54 | @behaviour Bonfire.Common.SchemaModule 55 | def query_module, do: ValueFlows.ValueCalculation.Queries 56 | 57 | def follow_filters, do: 
[:default] 58 | end 59 | -------------------------------------------------------------------------------- /lib/value_calculation/value_calculations.ex: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.ValueCalculation.ValueCalculations do 3 | use Arrows 4 | use Bonfire.Common.Utils, only: [maybe: 2] 5 | 6 | import Bonfire.Common.Config, only: [repo: 0] 7 | 8 | alias ValueFlows.ValueCalculation 9 | alias ValueFlows.ValueCalculation.Queries 10 | 11 | alias ValueFlows.EconomicEvent 12 | alias ValueFlows.Observe.Observations 13 | 14 | def one(filters), do: repo().single(Queries.query(ValueCalculation, filters)) 15 | 16 | def many(filters \\ []), 17 | do: {:ok, repo().many(Queries.query(ValueCalculation, filters))} 18 | 19 | def preload_all(%ValueCalculation{} = calculation) do 20 | # should always succeed 21 | {:ok, calculation} = one(id: calculation.id, preload: :all) 22 | calculation 23 | end 24 | 25 | @doc "Apply the value calculation to a context" 26 | def apply_to(%EconomicEvent{} = event, %ValueCalculation{formula: formula} = _calc) do 27 | # TODO: consider other libs like https://github.com/narrowtux/abacus 28 | # see https://elixirforum.com/t/expression-evaluate-user-input-expressions/61126 29 | Formula2.parse_and_eval(formula, formula_env(event)) 30 | ~> Formula2.decimal_to_float() 31 | end 32 | 33 | def create(%{} = user, attrs) do 34 | attrs = prepare_attrs(attrs) 35 | 36 | with :ok <- prepare_formula(attrs), 37 | {:ok, calculation} <- 38 | repo().insert(ValueCalculation.create_changeset(user, attrs)) do 39 | {:ok, preload_all(calculation)} 40 | end 41 | end 42 | 43 | def update(%ValueCalculation{} = calculation, attrs) do 44 | attrs = prepare_attrs(attrs) 45 | 46 | with :ok <- prepare_formula(attrs), 47 | {:ok, calculation} <- 48 | repo().update(ValueCalculation.update_changeset(calculation, attrs)) do 49 | {:ok, preload_all(calculation)} 50 | end 51 | end 52 | 53 | def soft_delete(%ValueCalculation{} = calculation) do 54 | Bonfire.Common.Repo.Delete.soft_delete(calculation) 55 | end 56 | 57 | defp formula_context(:event), 58 | do: ["resourceQuantity", "effortQuantity", "quality"] 59 | 60 | defp formula_env(%EconomicEvent{} = event) do 61 | resource_id = 62 | Map.get( 63 | event, 64 | :resource_inventoried_as_id, 65 | Map.get(event, :to_resource_inventoried_as_id) 66 | ) 67 | 68 | observation = 69 | if resource_id do 70 | case Observations.one([ 71 | :default, 72 | preload: :all, 73 | has_feature_of_interest: resource_id, 74 | order: :id, 75 | limit: 1 76 | ]) do 77 | {:ok, x} -> repo().preload(x, [:result_phenomenon]) 78 | _ -> nil 79 | end 80 | end 81 | 82 | ValueFlows.Util.map_values( 83 | %{ 84 | "resourceQuantity" => event.resource_quantity.has_numerical_value, 85 | "effortQuantity" => event.effort_quantity.has_numerical_value, 86 | "quality" => 87 | if is_nil(observation) do 88 | 0 89 | else 90 | observation.result_phenomenon.extra_info["formula_quantifier"] 91 | end 92 | }, 93 | &Formula2.float_to_decimal/1 94 | ) 95 | end 96 | 97 | defp prepare_formula(%{formula: formula}) do 98 | Formula2.parse_and_validate(formula, formula_context(:event), formula2_options()) 99 | end 100 | 101 | defp prepare_formula(_attrs), do: :ok 102 | 103 | defp prepare_attrs(attrs) do 104 | attrs 105 | |> Enums.maybe_put( 106 | :context_id, 107 | attrs |> Map.get(:in_scope_of) |> maybe(&List.first/1) 108 | ) 109 | |> Enums.maybe_put(:value_unit_id, Enums.attr_get_id(attrs, :value_unit)) 110 | |> 
Enums.maybe_put( 111 | :action_id, 112 | Enums.attr_get_id(attrs, :action) |> ValueFlows.Knowledge.Action.Actions.id() 113 | ) 114 | |> Enums.maybe_put(:value_action_id, attrs[:value_action]) 115 | |> Enums.maybe_put( 116 | :resource_conforms_to_id, 117 | Enums.attr_get_id(attrs, :resource_conforms_to) 118 | ) 119 | |> Enums.maybe_put( 120 | :value_resource_conforms_to_id, 121 | Enums.attr_get_id(attrs, :value_resource_conforms_to) 122 | ) 123 | end 124 | 125 | if Application.compile_env!(:bonfire, :env) == :test do 126 | defp formula2_options, do: [max_runs: 100] 127 | else 128 | defp formula2_options, do: [max_runs: 1_000] 129 | end 130 | end 131 | -------------------------------------------------------------------------------- /lib/value_flows.ex: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows do 2 | @moduledoc "./README.md" |> File.stream!() |> Enum.drop(1) |> Enum.join() 3 | end 4 | -------------------------------------------------------------------------------- /mess.exs: -------------------------------------------------------------------------------- 1 | # Copyright (c) 2020 James Laver 2 | # Copyright (c) 2025 Bonfire contributors 3 | # 4 | # This Source Code Form is subject to the terms of the Mozilla Public 5 | # License, v. 2.0. If a copy of the MPL was not distributed with this 6 | # file, You can obtain one at http://mozilla.org/MPL/2.0/. 7 | if not Code.ensure_loaded?(Mess) do 8 | defmodule Mess do 9 | @sources [path: "deps.path", git: "deps.git", hex: "deps.hex"] 10 | 11 | defp newline, do: ~r/(?:\r\n|[\r\n])/ 12 | defp parser, do: ~r/^(?\s*)((?[a-z_][a-z0-9_]+)\s*=\s*"(?[^"]+)")?(?.*)/ 13 | defp git_branch, do: ~r/(?[^#]+)(#(?.+))?/ 14 | 15 | def deps(sources \\ @sources, extra_deps \\ []), 16 | do: deps(Enum.flat_map(sources, fn {k, v} -> read(v, k) end), extra_deps, :deps) 17 | 18 | defp deps(packages, extra_deps, :deps), 19 | do: deps(Enum.flat_map(packages, &dep_spec/1), extra_deps, :uniq) 20 | 21 | defp deps(packages, extra_deps, :uniq), 22 | do: Enum.uniq_by(packages ++ extra_deps, &elem(&1, 0)) 23 | 24 | defp read(path, kind) when is_binary(path), do: read(File.read(path), kind) 25 | defp read({:error, :enoent}, _kind), do: [] 26 | 27 | defp read({:ok, file}, kind), 28 | do: Enum.map(String.split(file, newline()), &read_line(&1, kind)) 29 | 30 | defp read_line(line, kind), 31 | do: Map.put(Regex.named_captures(parser(), line), :kind, kind) 32 | 33 | defp dep_spec(%{"package" => ""}), do: [] 34 | 35 | defp dep_spec(%{"package" => p, "value" => v, :kind => :hex}), 36 | do: pkg(p, v, override: true) 37 | 38 | defp dep_spec(%{"package" => p, "value" => v, :kind => :path}), 39 | do: pkg(p, path: v, override: true) 40 | 41 | defp dep_spec(%{"package" => p, "value" => v, :kind => :git}), do: git(v, p) 42 | 43 | defp git(line, p) when is_binary(line), 44 | do: git(Regex.named_captures(git_branch(), line), p) 45 | 46 | defp git(%{"branch" => "", "repo" => r}, p), 47 | do: pkg(p, git: r, override: true) 48 | 49 | defp git(%{"branch" => b, "repo" => r}, p), 50 | do: pkg(p, git: r, branch: b, override: true) 51 | 52 | defp pkg(name, opts), do: [{String.to_atom(name), opts}] 53 | defp pkg(name, version, opts), do: [{String.to_atom(name), version, opts}] 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /mix.exs: -------------------------------------------------------------------------------- 1 | Code.eval_file("mess.exs", (if File.exists?("../../lib/mix/mess.exs"), do: 
"../../lib/mix/")) 2 | 3 | defmodule Bonfire.ValueFlows.MixProject do 4 | use Mix.Project 5 | 6 | def project do 7 | if System.get_env("AS_UMBRELLA") == "1" do 8 | [ 9 | build_path: "../../_build", 10 | config_path: "../../config/config.exs", 11 | deps_path: "../../deps", 12 | lockfile: "../../mix.lock" 13 | ] 14 | else 15 | [] 16 | end 17 | ++ 18 | [ 19 | app: :bonfire_valueflows, 20 | version: "0.1.0", 21 | elixir: "~> 1.10", 22 | elixirc_paths: elixirc_paths(Mix.env()), 23 | compilers: Mix.compilers(), 24 | start_permanent: Mix.env() == :prod, 25 | aliases: aliases(), 26 | deps: 27 | Mess.deps([ 28 | {:floki, ">= 0.0.0", only: [:dev, :test]}, 29 | {:bonfire_api_graphql, 30 | git: "https://github.com/bonfire-networks/bonfire_api_graphql", 31 | 32 | optional: true, runtime: false}, 33 | {:activity_pub, 34 | git: "https://github.com/bonfire-networks/activitypub", 35 | branch: "develop", 36 | optional: true, runtime: false}, 37 | {:bonfire_search, 38 | git: "https://github.com/bonfire-networks/bonfire_search", 39 | 40 | optional: true, runtime: false}, 41 | {:zest, "~> 0.1", only: :test} 42 | ]) 43 | ] 44 | end 45 | 46 | def application, do: [extra_applications: [:logger, :runtime_tools]] 47 | 48 | defp elixirc_paths(:test), do: ["lib", "test/support"] 49 | defp elixirc_paths(_), do: ["lib"] 50 | 51 | defp aliases do 52 | [ 53 | "hex.setup": ["local.hex --force"], 54 | "rebar.setup": ["local.rebar --force"], 55 | "js.deps.get": ["cmd npm install --prefix assets"], 56 | "ecto.seeds": ["run priv/repo/seeds.exs"], 57 | setup: [ 58 | "hex.setup", 59 | "rebar.setup", 60 | "deps.get", 61 | "ecto.setup", 62 | "js.deps.get" 63 | ], 64 | updates: ["deps.get", "ecto.migrate", "js.deps.get"], 65 | "ecto.setup": ["ecto.create", "ecto.migrate", "ecto.seeds"], 66 | "ecto.reset": ["ecto.drop", "ecto.setup"], 67 | test: ["ecto.create --quiet", "ecto.migrate --quiet", "test"] 68 | ] 69 | end 70 | end 71 | -------------------------------------------------------------------------------- /priv/gettext/en/LC_MESSAGES/errors.po: -------------------------------------------------------------------------------- 1 | ## `msgid`s in this file come from POT (.pot) files. 2 | ## 3 | ## Do not add, change, or remove `msgid`s manually here as 4 | ## they're tied to the ones in the corresponding POT file 5 | ## (with the same domain). 6 | ## 7 | ## Use `mix gettext.extract --merge` or `mix gettext.merge` 8 | ## to merge POT files into PO files. 
9 | msgid "" 10 | msgstr "" 11 | "Language: en\n" 12 | 13 | ## From Ecto.Changeset.cast/4 14 | msgid "can't be blank" 15 | msgstr "" 16 | 17 | ## From Ecto.Changeset.unique_constraint/3 18 | msgid "has already been taken" 19 | msgstr "" 20 | 21 | ## From Ecto.Changeset.put_change/3 22 | msgid "is invalid" 23 | msgstr "" 24 | 25 | ## From Ecto.Changeset.validate_acceptance/3 26 | msgid "must be accepted" 27 | msgstr "" 28 | 29 | ## From Ecto.Changeset.validate_format/3 30 | msgid "has invalid format" 31 | msgstr "" 32 | 33 | ## From Ecto.Changeset.validate_subset/3 34 | msgid "has an invalid entry" 35 | msgstr "" 36 | 37 | ## From Ecto.Changeset.validate_exclusion/3 38 | msgid "is reserved" 39 | msgstr "" 40 | 41 | ## From Ecto.Changeset.validate_confirmation/3 42 | msgid "does not match confirmation" 43 | msgstr "" 44 | 45 | ## From Ecto.Changeset.no_assoc_constraint/3 46 | msgid "is still associated with this entry" 47 | msgstr "" 48 | 49 | msgid "are still associated with this entry" 50 | msgstr "" 51 | 52 | ## From Ecto.Changeset.validate_length/3 53 | msgid "should be %{count} character(s)" 54 | msgid_plural "should be %{count} character(s)" 55 | msgstr[0] "" 56 | msgstr[1] "" 57 | 58 | msgid "should have %{count} item(s)" 59 | msgid_plural "should have %{count} item(s)" 60 | msgstr[0] "" 61 | msgstr[1] "" 62 | 63 | msgid "should be at least %{count} character(s)" 64 | msgid_plural "should be at least %{count} character(s)" 65 | msgstr[0] "" 66 | msgstr[1] "" 67 | 68 | msgid "should have at least %{count} item(s)" 69 | msgid_plural "should have at least %{count} item(s)" 70 | msgstr[0] "" 71 | msgstr[1] "" 72 | 73 | msgid "should be at most %{count} character(s)" 74 | msgid_plural "should be at most %{count} character(s)" 75 | msgstr[0] "" 76 | msgstr[1] "" 77 | 78 | msgid "should have at most %{count} item(s)" 79 | msgid_plural "should have at most %{count} item(s)" 80 | msgstr[0] "" 81 | msgstr[1] "" 82 | 83 | ## From Ecto.Changeset.validate_number/3 84 | msgid "must be less than %{number}" 85 | msgstr "" 86 | 87 | msgid "must be greater than %{number}" 88 | msgstr "" 89 | 90 | msgid "must be less than or equal to %{number}" 91 | msgstr "" 92 | 93 | msgid "must be greater than or equal to %{number}" 94 | msgstr "" 95 | 96 | msgid "must be equal to %{number}" 97 | msgstr "" 98 | -------------------------------------------------------------------------------- /priv/localisation/bonfire_valueflows.pot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bonfire-networks/bonfire_valueflows/c9336b001cf78f6ee3ae9606f40f93caedcb9680/priv/localisation/bonfire_valueflows.pot -------------------------------------------------------------------------------- /priv/repo/migrations/.formatter.exs: -------------------------------------------------------------------------------- 1 | [ 2 | import_deps: [:ecto_sql], 3 | inputs: ["*.exs"] 4 | ] 5 | -------------------------------------------------------------------------------- /priv/repo/migrations/20200523081012_init_pointers.exs: -------------------------------------------------------------------------------- 1 | defmodule Bonfire.Repo.Migrations.InitPointers do 2 | @moduledoc false 3 | use Ecto.Migration 4 | import Needle.Migration 5 | import Needle.ULID.Migration 6 | 7 | def up(), do: init(:up) 8 | def down(), do: init(:down) 9 | 10 | defp init(dir) do 11 | # this one is optional but recommended 12 | init_pointers_ulid_extra(dir) 13 | # this one is not optional 14 | init_pointers(dir) 
15 | end 16 | end 17 | -------------------------------------------------------------------------------- /priv/repo/migrations/20201205095039_import_valueflows.exs: -------------------------------------------------------------------------------- 1 | defmodule Bonfire.Repo.Migrations.ImportValueFlows do 2 | @moduledoc false 3 | use Ecto.Migration 4 | 5 | def up do 6 | ValueFlows.AllMigrations.up() 7 | end 8 | 9 | def down do 10 | ValueFlows.AllMigrations.down() 11 | end 12 | end 13 | -------------------------------------------------------------------------------- /priv/repo/migrations/20211112094942_import_commitment_satisfaction.exs: -------------------------------------------------------------------------------- 1 | defmodule Bonfire.Repo.Migrations.ImportCommitmentSatisfaction do 2 | @moduledoc false 3 | use Ecto.Migration 4 | 5 | def up do 6 | ValueFlows.Planning.Commitment.Migrations.up() 7 | ValueFlows.Planning.Satisfaction.Migrations.up() 8 | end 9 | 10 | def down do 11 | ValueFlows.Planning.Satisfaction.Migrations.down() 12 | ValueFlows.Planning.Commitment.Migrations.down() 13 | end 14 | end 15 | -------------------------------------------------------------------------------- /priv/repo/seeds.exs: -------------------------------------------------------------------------------- 1 | # Script for populating the database. You can run it as: 2 | # 3 | # mix run priv/repo/seeds.exs 4 | # 5 | # Inside the script, you can read and write to any of your 6 | # repositories directly: 7 | # 8 | # Bonfire.ValueFlows.Repo.insert!(%Bonfire.ValueFlows.SomeSchema{}) 9 | # 10 | # We recommend using the bang functions (`insert!`, `update!` 11 | # and so on) as they will fail if something goes wrong. 12 | -------------------------------------------------------------------------------- /test/action/action_graphql_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Valueflows.Knowledge.Action.GraphQLTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | # import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | @debug false 10 | @schema Bonfire.API.GraphQL.Schema 11 | 12 | describe "action" do 13 | test "fetches an existing action by label (via HTTP)" do 14 | user = fake_agent!() 15 | action = action() 16 | 17 | q = action_query() 18 | conn = user_conn(user) 19 | assert_action(grumble_post_key(q, conn, :action, %{id: action.label})) 20 | end 21 | 22 | test "fetches an existing action by label (via Absinthe.run)" do 23 | # user = fake_agent!() 24 | action = action() 25 | 26 | assert queried = 27 | Bonfire.API.GraphQL.QueryHelper.run_query_id( 28 | action.label, 29 | @schema, 30 | :action, 31 | 3, 32 | nil, 33 | @debug 34 | ) 35 | 36 | assert_action(queried) 37 | end 38 | end 39 | 40 | describe "actions" do 41 | test "fetches all actions" do 42 | user = fake_agent!() 43 | _actions = actions() 44 | q = actions_query() 45 | conn = user_conn(user) 46 | 47 | assert actions = grumble_post_key(q, conn, :actions, %{}) 48 | assert Enum.count(actions) > 1 49 | end 50 | end 51 | end 52 | -------------------------------------------------------------------------------- /test/action/actions_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.Action.ActionsTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | # import Bonfire.Common.Simulation 5 | 6 | # import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | 
alias ValueFlows.Knowledge.Action.Actions 10 | 11 | describe "action" do 12 | test "fetches an action" do 13 | assert {:ok, fetched} = Actions.action(:move) 14 | assert_action(fetched) 15 | assert {:ok, fetched} = Actions.action("move") 16 | assert_action(fetched) 17 | end 18 | end 19 | end 20 | -------------------------------------------------------------------------------- /test/agent/person/person_graphql_test.exs: -------------------------------------------------------------------------------- 1 | defmodule Valueflows.Agent.Person.GraphQLTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | # import Bonfire.Common.Simulation 5 | 6 | # import Bonfire.API.GraphQL.Test.GraphQLAssertions 7 | 8 | import Bonfire.Geolocate.Test.Faking 9 | 10 | import ValueFlows.Simulate 11 | import ValueFlows.Test.Faking 12 | 13 | @debug false 14 | @schema Bonfire.API.GraphQL.Schema 15 | 16 | describe "person" do 17 | test "fetches an existing person by id (via HTTP)" do 18 | user = fake_agent!() 19 | 20 | q = person_query() 21 | conn = user_conn(user) 22 | 23 | assert_agent(grumble_post_key(q, conn, :person, %{id: user.id}, "test", @debug)) 24 | end 25 | 26 | @tag :skip 27 | test "fetches an existing person by id (via Absinthe.run)" do 28 | user = fake_agent!() 29 | user2 = fake_agent!() 30 | 31 | # attach some data to the person... 32 | 33 | unit = maybe_fake_unit(user) 34 | 35 | _intent = fake_intent!(user, %{provider: user.id}) 36 | 37 | rspec = fake_resource_specification!(user) 38 | 39 | from_resource = 40 | fake_economic_resource!( 41 | user2, 42 | %{name: "Previous Resource", conforms_to: rspec.id}, 43 | unit 44 | ) 45 | 46 | resource = 47 | fake_economic_resource!( 48 | user, 49 | %{ 50 | primary_accountable: user.id, 51 | name: "Resulting Resource", 52 | conforms_to: rspec.id 53 | }, 54 | unit 55 | ) 56 | 57 | pspec = fake_process_specification!(user) 58 | _process = fake_process!(user, %{based_on: pspec.id}) 59 | 60 | _event = 61 | fake_economic_event!( 62 | user, 63 | %{ 64 | provider: user2.id, 65 | receiver: user.id, 66 | action: "transfer", 67 | input_of: fake_process!(user).id, 68 | output_of: fake_process!(user2).id, 69 | resource_conforms_to: fake_resource_specification!(user).id, 70 | resource_inventoried_as: from_resource.id, 71 | to_resource_inventoried_as: resource.id 72 | }, 73 | unit 74 | ) 75 | 76 | # IO.inspect(intent: intent) 77 | # IO.inspect(resource: resource) 78 | # IO.inspect(event: event) 79 | 80 | assert queried = 81 | Bonfire.API.GraphQL.QueryHelper.run_query_id( 82 | user.id, 83 | @schema, 84 | :person, 85 | 3, 86 | nil, 87 | @debug 88 | ) 89 | 90 | assert_agent(queried) 91 | # assert_optional(assert_url(queried["image"])) 92 | assert_intent(List.first(queried["intents"])) 93 | assert_process(List.first(queried["processes"])) 94 | assert_economic_event(List.first(queried["economicEvents"])) 95 | 96 | assert_economic_resource(List.first(queried["inventoriedEconomicResources"])) 97 | 98 | assert_geolocation(queried["primaryLocation"]) 99 | end 100 | end 101 | 102 | describe "persons" do 103 | test "fetches all" do 104 | user = fake_agent!() 105 | people = ValueFlows.Agent.People.people(user) 106 | num_people = Enum.count(people) 107 | 108 | q = people_query() 109 | conn = user_conn(user) 110 | 111 | assert fetched = grumble_post_key(q, conn, :people, %{}) 112 | assert num_people == Enum.count(fetched) 113 | end 114 | end 115 | end 116 | -------------------------------------------------------------------------------- /test/claim/claim_graphql_test.exs: 
-------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Claim.GraphQLTest do 3 | use Bonfire.ValueFlows.DataCase, async: true 4 | 5 | # import Bonfire.Common.Simulation 6 | 7 | import ValueFlows.Simulate 8 | import ValueFlows.Test.Faking 9 | 10 | @schema Bonfire.API.GraphQL.Schema 11 | 12 | describe "Claim" do 13 | test "fetches a claim by ID (via HTTP)" do 14 | user = fake_agent!() 15 | claim = fake_claim!(user) 16 | 17 | q = claim_query() 18 | conn = user_conn(user) 19 | assert fetched = grumble_post_key(q, conn, :claim, %{id: claim.id}) 20 | assert_claim(fetched) 21 | assert fetched["id"] == claim.id 22 | end 23 | 24 | @tag :skip 25 | test "fetched a full nested claim by ID (via Absinthe.run)" do 26 | user = fake_agent!() 27 | unit = maybe_fake_unit(user) 28 | 29 | claim = 30 | fake_claim!(user, %{ 31 | in_scope_of: [fake_agent!().id], 32 | resource_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 33 | effort_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 34 | resource_conforms_to: fake_resource_specification!(user).id, 35 | triggered_by: fake_economic_event!(user).id 36 | }) 37 | 38 | assert queried = 39 | Bonfire.API.GraphQL.QueryHelper.run_query_id( 40 | claim.id, 41 | @schema, 42 | :claim, 43 | 3 44 | ) 45 | 46 | assert_claim(queried) 47 | end 48 | end 49 | 50 | describe "createClaim" do 51 | test "creates a new claim" do 52 | user = fake_agent!() 53 | 54 | q = create_claim_mutation() 55 | conn = user_conn(user) 56 | 57 | vars = %{ 58 | claim: 59 | claim_input(%{ 60 | "provider" => fake_agent!().id, 61 | "receiver" => fake_agent!().id 62 | }) 63 | } 64 | 65 | assert claim = grumble_post_key(q, conn, :create_claim, vars)["claim"] 66 | assert_claim(claim) 67 | end 68 | 69 | test "fails for a guest user" do 70 | q = create_claim_mutation() 71 | 72 | vars = %{ 73 | claim: 74 | claim_input(%{ 75 | "provider" => fake_agent!().id, 76 | "receiver" => fake_agent!().id 77 | }) 78 | } 79 | 80 | assert [%{"code" => "needs_login"}] = grumble_post_errors(q, json_conn(), vars) 81 | end 82 | end 83 | 84 | describe "updateClaim" do 85 | test "updates an existing claim" do 86 | user = fake_agent!() 87 | claim = fake_claim!(user) 88 | 89 | q = update_claim_mutation() 90 | conn = user_conn(user) 91 | 92 | vars = %{claim: claim_input(%{"id" => claim.id})} 93 | 94 | assert updated = grumble_post_key(q, conn, :update_claim, vars)["claim"] 95 | assert_claim(updated) 96 | assert updated["id"] == claim.id 97 | end 98 | 99 | test "fails for a guest user" do 100 | claim = fake_claim!(fake_agent!()) 101 | q = update_claim_mutation() 102 | vars = %{claim: claim_input(%{"id" => claim.id})} 103 | 104 | assert [%{"code" => "needs_login"}] = grumble_post_errors(q, json_conn(), vars) 105 | end 106 | end 107 | 108 | describe "deleteClaim" do 109 | test "deletes an existing claim" do 110 | user = fake_agent!() 111 | claim = fake_claim!(user) 112 | 113 | q = delete_claim_mutation() 114 | conn = user_conn(user) 115 | 116 | vars = %{id: claim.id} 117 | 118 | assert grumble_post_key(q, conn, :delete_claim, vars) 119 | end 120 | 121 | test "fails for a guest user" do 122 | claim = fake_claim!(fake_agent!()) 123 | q = delete_claim_mutation() 124 | vars = %{id: claim.id} 125 | 126 | assert [%{"code" => "needs_login"}] = grumble_post_errors(q, json_conn(), vars) 127 | end 128 | end 129 | end 130 | -------------------------------------------------------------------------------- 
/test/economic_resource/resource_graphql_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EconomicResource.GraphQLTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | # alias Grumble.PP 9 | alias ValueFlows.EconomicResource.EconomicResources 10 | 11 | import Bonfire.Geolocate.Simulate 12 | # import Bonfire.Geolocate.Test.Faking 13 | 14 | @debug false 15 | @schema Bonfire.API.GraphQL.Schema 16 | 17 | describe "EconomicResource" do 18 | test "fetches a basic economic resource by ID" do 19 | user = fake_agent!() 20 | resource = fake_economic_resource!(user) 21 | 22 | q = economic_resource_query() 23 | conn = user_conn(user) 24 | 25 | assert fetched = grumble_post_key(q, conn, :economic_resource, %{id: resource.id}) 26 | 27 | assert_economic_resource(fetched) 28 | end 29 | 30 | @tag :skip 31 | test "fetches a full nested economic resource by ID (via Absinthe.run)" do 32 | user = fake_agent!() 33 | 34 | location = fake_geolocation!(user) 35 | owner = fake_agent!() 36 | unit = maybe_fake_unit(user) 37 | 38 | attrs = %{ 39 | current_location: location.id, 40 | conforms_to: fake_resource_specification!(user).id, 41 | contained_in: fake_economic_resource!(user).id, 42 | primary_accountable: owner.id, 43 | accounting_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 44 | onhand_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 45 | unit_of_effort: maybe_fake_unit(user).id 46 | } 47 | 48 | assert {:ok, resource} = EconomicResources.create(user, economic_resource(attrs)) 49 | 50 | assert_economic_resource(resource) 51 | 52 | # IO.inspect(created: resource) 53 | 54 | assert queried = 55 | Bonfire.API.GraphQL.QueryHelper.run_query_id( 56 | resource.id, 57 | @schema, 58 | :economic_resource, 59 | 4, 60 | nil, 61 | @debug 62 | ) 63 | 64 | assert_economic_resource(queried) 65 | end 66 | 67 | test "fail if has been deleted" do 68 | user = fake_agent!() 69 | resource = fake_economic_resource!(user) 70 | 71 | q = economic_resource_query() 72 | conn = user_conn(user) 73 | 74 | assert {:ok, _spec} = EconomicResources.soft_delete(resource) 75 | 76 | assert [ 77 | %{ 78 | "code" => "not_found", 79 | "path" => ["economicResource"], 80 | "status" => 404 81 | } 82 | ] = grumble_post_errors(q, conn, %{id: resource.id}) 83 | end 84 | end 85 | 86 | describe "EconomicResources" do 87 | test "return a list of economicResources" do 88 | user = fake_agent!() 89 | resources = some(5, fn -> fake_economic_resource!(user) end) 90 | # deleted 91 | some(2, fn -> 92 | resource = fake_economic_resource!(user) 93 | {:ok, resource} = EconomicResources.soft_delete(resource) 94 | resource 95 | end) 96 | 97 | q = economic_resources_query() 98 | conn = user_conn(user) 99 | 100 | assert fetched_economic_resources = grumble_post_key(q, conn, :economic_resources, %{}) 101 | 102 | assert Enum.count(resources) == Enum.count(fetched_economic_resources) 103 | end 104 | end 105 | 106 | describe "EconomicResourcesPages" do 107 | test "return a list of economicResources" do 108 | user = fake_agent!() 109 | resources = some(5, fn -> fake_economic_resource!(user) end) 110 | # deleted 111 | some(2, fn -> 112 | resource = fake_economic_resource!(user) 113 | {:ok, resource} = EconomicResources.soft_delete(resource) 114 | resource 115 | end) 116 | 117 | after_resource = List.first(resources) 118 | 119 | q = economic_resources_pages_query() 
120 | conn = user_conn(user) 121 | vars = %{after: after_resource.id, limit: 2} 122 | 123 | assert page = grumble_post_key(q, conn, :economic_resources_pages, vars) 124 | assert Enum.count(resources) == page["totalCount"] 125 | assert List.first(page["edges"])["id"] == after_resource.id 126 | end 127 | end 128 | end 129 | -------------------------------------------------------------------------------- /test/planning/intents_test.exs: -------------------------------------------------------------------------------- 1 | # SPDX-License-Identifier: AGPL-3.0-only 2 | defmodule ValueFlows.Planning.Intent.IntentsTest do 3 | use Bonfire.ValueFlows.DataCase, async: true 4 | 5 | # import Bonfire.Common.Simulation 6 | 7 | import ValueFlows.Simulate 8 | import ValueFlows.Test.Faking 9 | 10 | alias ValueFlows.Planning.Intent.Intents 11 | 12 | describe "one" do 13 | test "fetches an existing intent by ID" do 14 | user = fake_agent!() 15 | intent = fake_intent!(user) 16 | 17 | assert {:ok, fetched} = Intents.one(id: intent.id) 18 | assert_intent(intent, fetched) 19 | assert {:ok, fetched} = Intents.one(user: user) 20 | assert_intent(intent, fetched) 21 | # TODO 22 | # assert {:ok, fetched} = Intents.one(context: comm) 23 | end 24 | end 25 | 26 | describe "create" do 27 | test "can create an intent" do 28 | user = fake_agent!() 29 | 30 | assert {:ok, intent} = Intents.create(user, intent()) 31 | assert_intent(intent) 32 | end 33 | 34 | test "can create an intent with measure" do 35 | user = fake_agent!() 36 | unit = maybe_fake_unit(user) 37 | 38 | measures = %{ 39 | resource_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 40 | effort_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 41 | available_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}) 42 | } 43 | 44 | assert {:ok, intent} = Intents.create(user, intent(measures)) 45 | assert_intent(intent) 46 | end 47 | 48 | test "can create an intent with provider and receiver" do 49 | user = fake_agent!() 50 | 51 | attrs = %{ 52 | provider: fake_agent!().id 53 | } 54 | 55 | assert {:ok, intent} = Intents.create(user, intent(attrs)) 56 | assert intent.provider_id == attrs.provider 57 | 58 | attrs = %{ 59 | receiver: fake_agent!().id 60 | } 61 | 62 | assert {:ok, intent} = Intents.create(user, intent(attrs)) 63 | assert intent.receiver_id == attrs.receiver 64 | 65 | attrs = %{ 66 | receiver: fake_agent!().id, 67 | provider: fake_agent!().id 68 | } 69 | 70 | assert {:ok, intent} = Intents.create(user, intent(attrs)) 71 | assert intent.receiver_id == attrs.receiver 72 | assert intent.provider_id == attrs.provider 73 | end 74 | 75 | test "can create an intent with a context" do 76 | user = fake_agent!() 77 | context = fake_agent!() 78 | 79 | attrs = %{in_scope_of: [context.id]} 80 | 81 | assert {:ok, intent} = Intents.create(user, intent(attrs)) 82 | assert_intent(intent) 83 | assert intent.context.id == context.id 84 | end 85 | 86 | test "can create an intent with tags" do 87 | user = fake_agent!() 88 | tags = some_fake_categories(user) 89 | 90 | attrs = intent(%{tags: tags}) 91 | assert {:ok, intent} = Intents.create(user, attrs) 92 | assert_intent(intent) 93 | intent = repo().preload(intent, :tags) 94 | assert Enum.count(intent.tags) == Enum.count(tags) 95 | end 96 | end 97 | 98 | describe "update" do 99 | test "updates an existing intent" do 100 | user = fake_agent!() 101 | unit = maybe_fake_unit(user) 102 | intent = fake_intent!(user) 103 | 104 | measures = %{ 105 | resource_quantity: 
Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 106 | # don't update one of them 107 | # effort_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}), 108 | available_quantity: Bonfire.Quantify.Simulate.measure(%{unit_id: unit.id}) 109 | } 110 | 111 | assert {:ok, updated} = Intents.update(user, intent, intent(measures)) 112 | assert_intent(updated) 113 | assert intent != updated 114 | assert intent.effort_quantity_id == updated.effort_quantity_id 115 | assert intent.resource_quantity_id != updated.resource_quantity_id 116 | assert intent.available_quantity_id != updated.available_quantity_id 117 | end 118 | 119 | test "fails if we don't have permission" do 120 | user = fake_agent!() 121 | random = fake_agent!() 122 | intent = fake_intent!(user) 123 | 124 | assert {:error, :not_permitted} = 125 | Intents.update(random, intent, intent(%{note: "i can hackz?"})) 126 | end 127 | 128 | test "doesn't update if invalid action is given" do 129 | user = fake_agent!() 130 | intent = fake_intent!(user) 131 | 132 | assert {:ok, %{action: action}} = 133 | Intents.update(user, intent, intent(%{action: "sleeping"})) 134 | 135 | assert action == intent.action 136 | end 137 | end 138 | end 139 | -------------------------------------------------------------------------------- /test/process/process_federate.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Process.FederateTest do 2 | use Bonfire.ValueFlows.DataCase 3 | @moduletag :federation 4 | 5 | import Bonfire.Common.Simulation 6 | import Bonfire.Geolocate.Simulate 7 | import ValueFlows.Simulate 8 | import ValueFlows.Test.Faking 9 | 10 | @debug false 11 | @schema Bonfire.API.GraphQL.Schema 12 | 13 | describe "process" do 14 | test "federates/publishes" do 15 | user = fake_agent!() 16 | 17 | process = fake_process!(user) 18 | 19 | # IO.inspect(pre_fed: proposal) 20 | 21 | assert {:ok, activity} = Bonfire.Federate.ActivityPub.Outgoing.push_now!(process) 22 | 23 | # IO.inspect(published: activity) ######## 24 | 25 | assert activity.object.pointer_id == process.id 26 | assert activity.local == true 27 | 28 | assert activity.object.data["name"] == process.name 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /test/process/processes_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Process.ProcessesTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | alias ValueFlows.Process.Processes 10 | 11 | describe "one" do 12 | test "fetches an existing process by ID" do 13 | user = fake_agent!() 14 | spec = fake_process!(user) 15 | 16 | assert {:ok, fetched} = Processes.one(id: spec.id) 17 | assert_process(fetched) 18 | assert {:ok, fetched} = Processes.one(user: user) 19 | assert_process(fetched) 20 | end 21 | 22 | test "cannot fetch a deleted process" do 23 | user = fake_agent!() 24 | spec = fake_process!(user) 25 | assert {:ok, spec} = Processes.soft_delete(spec) 26 | assert {:error, :not_found} = Processes.one([:deleted, id: spec.id]) 27 | end 28 | end 29 | 30 | describe "create" do 31 | test "can create a process" do 32 | user = fake_agent!() 33 | 34 | assert {:ok, process} = Processes.create(user, process()) 35 | assert_process(process) 36 | end 37 | 38 | test "can create a process with context" do 39 | user = fake_agent!() 40 | parent = 
fake_agent!() 41 | 42 | attrs = %{in_scope_of: [parent.id]} 43 | assert {:ok, process} = Processes.create(user, process(attrs)) 44 | assert_process(process) 45 | assert process.context.id == parent.id 46 | end 47 | 48 | test "can create a process with tags" do 49 | user = fake_agent!() 50 | tags = some_fake_categories(user) 51 | 52 | attrs = process(%{tags: tags}) 53 | assert {:ok, process} = Processes.create(user, attrs) 54 | assert_process(process) 55 | 56 | process = repo().preload(process, :tags) 57 | assert Enum.count(process.tags) == Enum.count(tags) 58 | end 59 | end 60 | 61 | describe "update" do 62 | test "can update an existing process" do 63 | user = fake_agent!() 64 | spec = fake_process!(user) 65 | 66 | assert {:ok, updated} = Processes.update(spec, process()) 67 | assert_process(updated) 68 | assert updated.updated_at != spec.updated_at 69 | end 70 | end 71 | 72 | describe "soft delete" do 73 | test "delete an existing process" do 74 | user = fake_agent!() 75 | spec = fake_process!(user) 76 | 77 | refute spec.deleted_at 78 | assert {:ok, spec} = Processes.soft_delete(spec) 79 | assert spec.deleted_at 80 | end 81 | end 82 | end 83 | -------------------------------------------------------------------------------- /test/process_specification/process_spec_federate.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.ProcessSpecification.FederateTest do 2 | use Bonfire.ValueFlows.DataCase 3 | @moduletag :federation 4 | 5 | import Bonfire.Common.Simulation 6 | import Bonfire.Geolocate.Simulate 7 | import ValueFlows.Simulate 8 | import ValueFlows.Test.Faking 9 | 10 | @debug false 11 | @schema Bonfire.API.GraphQL.Schema 12 | 13 | describe "process spec" do 14 | test "federates/publishes" do 15 | user = fake_agent!() 16 | 17 | process_spec = fake_process_specification!(user) 18 | 19 | # IO.inspect(pre_fed: proposal) 20 | 21 | assert {:ok, activity} = Bonfire.Federate.ActivityPub.Outgoing.push_now!(process_spec) 22 | 23 | # IO.inspect(published: activity) ######## 24 | 25 | assert activity.object.pointer_id == process_spec.id 26 | assert activity.local == true 27 | 28 | assert activity.object.data["name"] =~ process_spec.name 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /test/process_specification/process_specs_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.ProcessSpecification.ProcessSpecificationsTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | # import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | alias ValueFlows.Knowledge.ProcessSpecification.ProcessSpecifications 10 | 11 | describe "one" do 12 | test "fetches an existing process specification by ID" do 13 | user = fake_agent!() 14 | spec = fake_process_specification!(user) 15 | 16 | assert {:ok, fetched} = ProcessSpecifications.one(id: spec.id) 17 | assert_process_specification(fetched) 18 | assert {:ok, fetched} = ProcessSpecifications.one(user: user) 19 | assert_process_specification(fetched) 20 | end 21 | 22 | test "cannot fetch a deleted process specification" do 23 | user = fake_agent!() 24 | spec = fake_process_specification!(user) 25 | assert {:ok, spec} = ProcessSpecifications.soft_delete(spec) 26 | 27 | assert {:error, :not_found} = ProcessSpecifications.one([:deleted, id: spec.id]) 28 | end 29 | end 30 | 31 | describe "create" do 32 | test "can 
create a process specification" do 33 | user = fake_agent!() 34 | 35 | assert {:ok, spec} = ProcessSpecifications.create(user, process_specification()) 36 | 37 | assert_process_specification(spec) 38 | end 39 | 40 | test "can create a process specification with context" do 41 | user = fake_agent!() 42 | 43 | attrs = %{in_scope_of: [fake_agent!().id]} 44 | 45 | assert {:ok, spec} = ProcessSpecifications.create(user, process_specification(attrs)) 46 | 47 | assert_process_specification(spec) 48 | assert spec.context_id == hd(attrs.in_scope_of) 49 | end 50 | 51 | test "can create a process_specification with tags" do 52 | user = fake_agent!() 53 | tags = some_fake_categories(user) 54 | 55 | attrs = process_specification(%{tags: tags}) 56 | 57 | assert {:ok, process_specification} = ProcessSpecifications.create(user, attrs) 58 | 59 | assert_process_specification(process_specification) 60 | 61 | process_specification = repo().preload(process_specification, :tags) 62 | assert Enum.count(process_specification.tags) == Enum.count(tags) 63 | end 64 | end 65 | 66 | describe "update" do 67 | test "can update an existing process specification" do 68 | user = fake_agent!() 69 | spec = fake_process_specification!(user) 70 | 71 | assert {:ok, updated} = ProcessSpecifications.update(spec, process_specification()) 72 | 73 | assert_process_specification(updated) 74 | assert updated.updated_at != spec.updated_at 75 | end 76 | end 77 | 78 | describe "soft delete" do 79 | test "delete an existing process specification" do 80 | user = fake_agent!() 81 | spec = fake_process_specification!(user) 82 | 83 | refute spec.deleted_at 84 | assert {:ok, spec} = ProcessSpecifications.soft_delete(spec) 85 | assert spec.deleted_at 86 | end 87 | end 88 | end 89 | -------------------------------------------------------------------------------- /test/proposal/proposed_intent_graphql_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal.ProposedIntentGraphQLTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | # import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | describe "propose_intent" do 10 | test "creates a new proposed intent" do 11 | user = fake_agent!() 12 | proposal = fake_proposal!(user) 13 | intent = fake_intent!(user) 14 | 15 | q = propose_intent_mutation(fields: [publishes: [:id], published_in: [:id]]) 16 | 17 | conn = user_conn(user) 18 | 19 | vars = 20 | proposed_intent_input(%{ 21 | "publishes" => intent.id, 22 | "publishedIn" => proposal.id 23 | }) 24 | 25 | assert proposed_intent = 26 | grumble_post_key(q, conn, :propose_intent, vars)[ 27 | "proposedIntent" 28 | ] 29 | 30 | assert_proposed_intent(proposed_intent) 31 | assert proposed_intent["publishedIn"]["id"] == proposal.id 32 | assert proposed_intent["publishes"]["id"] == intent.id 33 | end 34 | end 35 | 36 | describe "delete_proposed_intent" do 37 | test "deletes a proposed intent" do 38 | user = fake_agent!() 39 | 40 | proposed_intent = 41 | fake_proposed_intent!( 42 | fake_proposal!(user), 43 | fake_intent!(user) 44 | ) 45 | 46 | q = delete_proposed_intent_mutation() 47 | 48 | conn = user_conn(user) 49 | vars = %{id: proposed_intent.id} 50 | assert grumble_post_key(q, conn, :delete_proposed_intent, vars) 51 | end 52 | end 53 | end 54 | -------------------------------------------------------------------------------- /test/proposal/proposed_to_graphql_test.exs: 
-------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Proposal.ProposedToGraphQLTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | # import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | describe "propose_to" do 10 | test "creates a new proposed to item" do 11 | user = fake_agent!() 12 | proposal = fake_proposal!(user) 13 | agent = fake_agent!() 14 | 15 | q = propose_to_mutation(fields: [proposed: [:id], proposed_to: [:id]]) 16 | 17 | conn = user_conn(user) 18 | 19 | vars = %{ 20 | "proposed" => proposal.id, 21 | "proposedTo" => agent.id 22 | } 23 | 24 | assert proposed_to = grumble_post_key(q, conn, :propose_to, vars)["proposedTo"] 25 | 26 | assert_proposed_to(proposed_to) 27 | assert proposed_to["proposed"]["id"] == proposal.id 28 | assert proposed_to["proposedTo"]["id"] == agent.id 29 | end 30 | end 31 | 32 | describe "delete_proposed_to" do 33 | test "deletes an existing proposed to item" do 34 | user = fake_agent!() 35 | proposed_to = fake_proposed_to!(fake_agent!(), fake_proposal!(user)) 36 | 37 | q = delete_proposed_to_mutation() 38 | conn = user_conn(user) 39 | 40 | assert grumble_post_key(q, conn, :delete_proposed_to, %{ 41 | id: proposed_to.id 42 | }) 43 | end 44 | end 45 | end 46 | -------------------------------------------------------------------------------- /test/resource_specification/resource_spec_federate.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.ResourceSpecification.FederateTest do 2 | use Bonfire.ValueFlows.DataCase 3 | @moduletag :federation 4 | 5 | import Bonfire.Common.Simulation 6 | import Bonfire.Geolocate.Simulate 7 | import ValueFlows.Simulate 8 | import ValueFlows.Test.Faking 9 | 10 | @debug false 11 | @schema Bonfire.API.GraphQL.Schema 12 | 13 | describe "resource spec" do 14 | test "federates/publishes" do 15 | user = fake_agent!() 16 | 17 | resource_spec = fake_resource_specification!(user) 18 | 19 | # IO.inspect(pre_fed: proposal) 20 | 21 | assert {:ok, activity} = Bonfire.Federate.ActivityPub.Outgoing.push_now!(resource_spec) 22 | 23 | info(activity) 24 | 25 | assert activity.object.pointer_id == resource_spec.id 26 | assert activity.local == true 27 | 28 | assert activity.object.data["name"] =~ resource_spec.name 29 | end 30 | end 31 | end 32 | -------------------------------------------------------------------------------- /test/resource_specification/resource_specs_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Knowledge.ResourceSpecification.ResourceSpecificationsTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | # import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | alias ValueFlows.Knowledge.ResourceSpecification.ResourceSpecifications 10 | 11 | describe "one" do 12 | test "fetches an existing resource specification by ID" do 13 | user = fake_agent!() 14 | spec = fake_resource_specification!(user) 15 | 16 | assert {:ok, fetched} = ResourceSpecifications.one(id: spec.id) 17 | assert_resource_specification(fetched) 18 | assert {:ok, fetched} = ResourceSpecifications.one(user: user) 19 | assert_resource_specification(fetched) 20 | end 21 | 22 | test "cannot fetch a deleted resource specification" do 23 | user = fake_agent!() 24 | spec = fake_resource_specification!(user) 25 | assert {:ok, spec} = 
ResourceSpecifications.soft_delete(spec) 26 | 27 | assert {:error, :not_found} = ResourceSpecifications.one([:deleted, id: spec.id]) 28 | end 29 | end 30 | 31 | describe "create" do 32 | test "can create a resource specification" do 33 | user = fake_agent!() 34 | 35 | assert {:ok, spec} = ResourceSpecifications.create(user, resource_specification()) 36 | 37 | assert_resource_specification(spec) 38 | end 39 | 40 | test "can create a resource specification with context" do 41 | user = fake_agent!() 42 | parent = fake_agent!() 43 | 44 | attrs = %{in_scope_of: [parent.id]} 45 | 46 | assert {:ok, spec} = 47 | ResourceSpecifications.create( 48 | user, 49 | resource_specification(attrs) 50 | ) 51 | 52 | assert_resource_specification(spec) 53 | assert spec.context_id == parent.id 54 | end 55 | 56 | test "can create a resource_specification with tags" do 57 | user = fake_agent!() 58 | tags = some_fake_categories(user) 59 | 60 | attrs = resource_specification(%{tags: tags}) 61 | 62 | assert {:ok, resource_specification} = ResourceSpecifications.create(user, attrs) 63 | 64 | assert_resource_specification(resource_specification) 65 | 66 | resource_specification = repo().preload(resource_specification, :tags) 67 | assert Enum.count(resource_specification.tags) == Enum.count(tags) 68 | end 69 | end 70 | 71 | describe "update" do 72 | test "can update an existing resource specification" do 73 | user = fake_agent!() 74 | spec = fake_resource_specification!(user) 75 | 76 | assert {:ok, updated} = ResourceSpecifications.update(spec, resource_specification()) 77 | 78 | assert_resource_specification(updated) 79 | assert updated.updated_at != spec.updated_at 80 | end 81 | end 82 | 83 | describe "soft delete" do 84 | test "delete an existing resource specification" do 85 | user = fake_agent!() 86 | spec = fake_resource_specification!(user) 87 | 88 | refute spec.deleted_at 89 | assert {:ok, spec} = ResourceSpecifications.soft_delete(spec) 90 | assert spec.deleted_at 91 | end 92 | end 93 | end 94 | -------------------------------------------------------------------------------- /test/support/channel_case.ex: -------------------------------------------------------------------------------- 1 | defmodule Bonfire.ValueFlows.ChannelCase do 2 | @moduledoc """ 3 | This module defines the test case to be used by 4 | channel tests. 5 | 6 | Such tests rely on `Phoenix.ChannelTest` and also 7 | import other functionality to make it easier 8 | to build common data structures and query the data layer. 9 | 10 | Finally, if the test case interacts with the database, 11 | we enable the SQL sandbox, so changes done to the database 12 | are reverted at the end of every test. If you are using 13 | PostgreSQL, you can even run database tests asynchronously 14 | by setting `use MyApp.Web.ChannelCase, async: true`, although 15 | this option is not recommended for other databases. 
16 | """ 17 | 18 | use ExUnit.CaseTemplate 19 | 20 | using do 21 | quote do 22 | # Import conveniences for testing with channels 23 | import Phoenix.ChannelTest 24 | import Bonfire.ValueFlows.ChannelCase 25 | import Untangle 26 | import Bonfire.Common.Config, only: [repo: 0] 27 | 28 | # The default endpoint for testing 29 | @endpoint Application.compile_env!(:bonfire, :endpoint_module) 30 | end 31 | end 32 | 33 | setup tags do 34 | import Bonfire.Common.Config, only: [repo: 0] 35 | 36 | Bonfire.Common.Test.Interactive.setup_test_repo(tags) 37 | 38 | :ok 39 | end 40 | end 41 | -------------------------------------------------------------------------------- /test/support/conn_case.ex: -------------------------------------------------------------------------------- 1 | defmodule Bonfire.ValueFlows.ConnCase do 2 | @moduledoc """ 3 | This module defines the test case to be used by 4 | tests that require setting up a connection. 5 | 6 | Such tests rely on `Phoenix.ConnTest` and also 7 | import other functionality to make it easier 8 | to build common data structures and query the data layer. 9 | 10 | Finally, if the test case interacts with the database, 11 | we enable the SQL sandbox, so changes done to the database 12 | are reverted at the end of every test. If you are using 13 | PostgreSQL, you can even run database tests asynchronously 14 | by setting `use MyApp.Web.ConnCase, async: true`, although 15 | this option is not recommended for other databases. 16 | """ 17 | 18 | use ExUnit.CaseTemplate 19 | 20 | using do 21 | quote do 22 | # Import conveniences for testing with connections 23 | import Plug.Conn 24 | import Phoenix.ConnTest 25 | import Phoenix.LiveViewTest 26 | # import Bonfire.ValueFlows.ConnCase 27 | import Bonfire.ValueFlows.Test.ConnHelpers 28 | # import Bonfire.ValueFlows.Test.FakeHelpers 29 | alias Bonfire.ValueFlows.Fake 30 | alias Bonfire.ValueFlows.Web.Router.Helpers, as: Routes 31 | 32 | import Bonfire.Common.Config, only: [repo: 0] 33 | import Untangle 34 | 35 | # The default endpoint for testing 36 | @endpoint Application.compile_env!(:bonfire, :endpoint_module) 37 | 38 | @moduletag :ui 39 | end 40 | end 41 | 42 | setup tags do 43 | import Bonfire.Common.Config, only: [repo: 0] 44 | 45 | Bonfire.Common.Test.Interactive.setup_test_repo(tags) 46 | 47 | {:ok, []} 48 | end 49 | end 50 | -------------------------------------------------------------------------------- /test/support/data_case.ex: -------------------------------------------------------------------------------- 1 | defmodule Bonfire.ValueFlows.DataCase do 2 | @moduledoc """ 3 | This module defines the setup for tests requiring 4 | access to the application's data layer. 5 | 6 | You may define functions here to be used as helpers in 7 | your tests. 8 | 9 | Finally, if the test case interacts with the database, 10 | we enable the SQL sandbox, so changes done to the database 11 | are reverted at the end of every test. If you are using 12 | PostgreSQL, you can even run database tests asynchronously 13 | by setting `use Bonfire.ValueFlows.DataCase, async: true`, although 14 | this option is not recommended for other databases. 
15 | """ 16 | 17 | use ExUnit.CaseTemplate 18 | 19 | using do 20 | quote do 21 | import Ecto 22 | import Ecto.Changeset 23 | import Ecto.Query 24 | # import Bonfire.ValueFlows.DataCase 25 | import Untangle 26 | import Bonfire.Common.Config, only: [repo: 0] 27 | 28 | @moduletag :backend 29 | end 30 | end 31 | 32 | setup tags do 33 | import Bonfire.Common.Config, only: [repo: 0] 34 | ActivityPub.Utils.cache_clear() 35 | Bonfire.Common.Test.Interactive.setup_test_repo(tags) 36 | 37 | :ok 38 | end 39 | 40 | @doc """ 41 | A helper that transforms changeset errors into a map of messages. 42 | 43 | assert {:error, changeset} = Accounts.create_user(%{password: "short"}) 44 | assert "password is too short" in errors_on(changeset).password 45 | assert %{password: ["password is too short"]} = errors_on(changeset) 46 | 47 | """ 48 | def errors_on(changeset) do 49 | Ecto.Changeset.traverse_errors(changeset, fn {message, opts} -> 50 | Regex.replace(~r"%{(\w+)}", message, fn _, key -> 51 | opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string() 52 | end) 53 | end) 54 | end 55 | end 56 | -------------------------------------------------------------------------------- /test/support/data_helpers.ex: -------------------------------------------------------------------------------- 1 | defmodule Bonfire.ValueFlows.DataHelpers do 2 | # import ExUnit.Assertions 3 | # alias Bonfire.ValueFlows.Fake 4 | end 5 | -------------------------------------------------------------------------------- /test/test_helper.exs: -------------------------------------------------------------------------------- 1 | ExUnit.start(exclude: Bonfire.Common.RuntimeConfig.skip_test_tags()) 2 | 3 | Ecto.Adapters.SQL.Sandbox.mode( 4 | Bonfire.Common.Config.repo(), 5 | :manual 6 | ) 7 | -------------------------------------------------------------------------------- /test/track_trace/events_track_trace_graphql_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EconomicEvent.EventsTrackTraceGraphQLTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | # alias Grumble.PP 10 | alias ValueFlows.EconomicEvent.EconomicEvents 11 | 12 | import Bonfire.Geolocate.Simulate 13 | # import Bonfire.Geolocate.Test.Faking 14 | 15 | @debug false 16 | @schema Bonfire.API.GraphQL.Schema 17 | 18 | describe "EconomicEvent.track" do 19 | test "Returns a list of EconomicResources or Processes" do 20 | user = fake_agent!() 21 | unit = maybe_fake_unit(user) 22 | 23 | process = fake_process!(user) 24 | another_process = fake_process!(user) 25 | 26 | resource = fake_economic_resource!(user, %{}, unit) 27 | another_resource = fake_economic_resource!(user, %{}, unit) 28 | 29 | event = 30 | fake_economic_event!( 31 | user, 32 | %{ 33 | input_of: process.id, 34 | output_of: another_process.id, 35 | resource_inventoried_as: resource.id, 36 | to_resource_inventoried_as: another_resource.id, 37 | action: "transfer" 38 | }, 39 | unit 40 | ) 41 | 42 | q = economic_event_query(fields: [track: [:__typename]]) 43 | conn = user_conn(user) 44 | 45 | assert event = grumble_post_key(q, conn, :economic_event, %{id: event.id}) 46 | assert Enum.count(event["track"]) >= 3 47 | end 48 | end 49 | 50 | describe "EconomicEvent.trace" do 51 | test "Returns a list of economic events that are outputs" do 52 | user = fake_agent!() 53 | unit = maybe_fake_unit(user) 54 | 55 | process = fake_process!(user) 
56 | another_process = fake_process!(user) 57 | 58 | resource = fake_economic_resource!(user, %{}, unit) 59 | another_resource = fake_economic_resource!(user, %{}, unit) 60 | 61 | event = 62 | fake_economic_event!( 63 | user, 64 | %{ 65 | input_of: process.id, 66 | output_of: another_process.id, 67 | resource_inventoried_as: resource.id, 68 | to_resource_inventoried_as: another_resource.id, 69 | action: "transfer" 70 | }, 71 | unit 72 | ) 73 | 74 | q = economic_event_query(fields: [trace: [:__typename]]) 75 | conn = user_conn(user) 76 | 77 | assert event = grumble_post_key(q, conn, :economic_event, %{id: event.id}) 78 | assert Enum.count(event["trace"]) >= 2 79 | end 80 | end 81 | end 82 | -------------------------------------------------------------------------------- /test/track_trace/process_track_trace_graphql_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Process.TrackTraceGraphQLTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Common.Simulation 5 | 6 | # alias Grumble.PP 7 | # import Grumble 8 | import ValueFlows.Simulate 9 | import ValueFlows.Test.Faking 10 | 11 | alias ValueFlows.Process.Processes 12 | 13 | @debug false 14 | @schema Bonfire.API.GraphQL.Schema 15 | 16 | describe "Process.track" do 17 | test "Returns a list of economic events that are outputs" do 18 | user = fake_agent!() 19 | process = fake_process!(user) 20 | 21 | _output_events = 22 | some(5, fn -> 23 | fake_economic_event!(user, %{ 24 | output_of: process.id, 25 | action: "produce" 26 | }) 27 | end) 28 | 29 | q = process_query(fields: [track: [:__typename]]) 30 | conn = user_conn(user) 31 | 32 | assert process = grumble_post_key(q, conn, :process, %{id: process.id}) 33 | assert Enum.count(process["track"]) >= 5 34 | end 35 | end 36 | 37 | describe "Process.trace" do 38 | test "Returns a list of economic events that are inputs" do 39 | user = fake_agent!() 40 | process = fake_process!(user) 41 | 42 | _input_events = 43 | some(5, fn -> 44 | fake_economic_event!(user, %{ 45 | input_of: process.id, 46 | action: "consume" 47 | }) 48 | end) 49 | 50 | q = process_query(fields: [trace: [:__typename]]) 51 | conn = user_conn(user) 52 | 53 | assert process = grumble_post_key(q, conn, :process, %{id: process.id}) 54 | assert Enum.count(process["trace"]) >= 5 55 | end 56 | end 57 | 58 | describe "Process.inputs" do 59 | test "Returns a list of economic events that are inputs" do 60 | user = fake_agent!() 61 | process = fake_process!(user) 62 | 63 | _input_events = 64 | some(5, fn -> 65 | fake_economic_event!(user, %{ 66 | input_of: process.id, 67 | action: "use" 68 | }) 69 | end) 70 | 71 | q = process_inputs_query(fields: economic_event_fields()) 72 | conn = user_conn(user) 73 | 74 | assert process = grumble_post_key(q, conn, :process, %{id: process.id}) 75 | assert Enum.count(process["inputs"]) == 5 76 | end 77 | 78 | test "Returns a list of economic events that are inputs and with an action consume" do 79 | user = fake_agent!() 80 | process = fake_process!(user) 81 | 82 | _input_events = 83 | some(5, fn -> 84 | fake_economic_event!(user, %{ 85 | input_of: process.id, 86 | action: "consume" 87 | }) 88 | end) 89 | 90 | _other_input_events = 91 | some(5, fn -> 92 | fake_economic_event!(user, %{ 93 | input_of: process.id, 94 | action: "use" 95 | }) 96 | end) 97 | 98 | q = process_inputs_query(fields: economic_event_fields()) 99 | conn = user_conn(user) 100 | 101 | assert process = 102 | grumble_post_key(q, conn, :process, %{ 103 
| id: process.id, 104 | action_id: "consume" 105 | }) 106 | 107 | assert Enum.count(process["inputs"]) == 5 108 | end 109 | end 110 | 111 | describe "Process.outputs" do 112 | test "Returns a list of economic events that are outputs" do 113 | user = fake_agent!() 114 | process = fake_process!(user) 115 | 116 | _output_events = 117 | some(5, fn -> 118 | fake_economic_event!(user, %{ 119 | output_of: process.id, 120 | action: "produce" 121 | }) 122 | end) 123 | 124 | q = process_outputs_query(fields: economic_event_fields()) 125 | conn = user_conn(user) 126 | 127 | assert process = grumble_post_key(q, conn, :process, %{id: process.id}) 128 | assert Enum.count(process["outputs"]) == 5 129 | end 130 | 131 | test "Returns a list of economic events that are outputs and with an action produce" do 132 | user = fake_agent!() 133 | process = fake_process!(user) 134 | 135 | _output_events = 136 | some(5, fn -> 137 | fake_economic_event!(user, %{ 138 | output_of: process.id, 139 | action: "produce" 140 | }) 141 | end) 142 | 143 | _other_output_events = 144 | some(5, fn -> 145 | fake_economic_event!(user, %{ 146 | output_of: process.id, 147 | action: "raise" 148 | }) 149 | end) 150 | 151 | q = process_outputs_query(fields: economic_event_fields()) 152 | conn = user_conn(user) 153 | 154 | assert process = 155 | grumble_post_key(q, conn, :process, %{ 156 | id: process.id, 157 | action_id: "produce" 158 | }) 159 | 160 | assert Enum.count(process["outputs"]) == 5 161 | end 162 | end 163 | end 164 | -------------------------------------------------------------------------------- /test/track_trace/process_track_trace_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.Process.ProcessesTrackTraceTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | import ValueFlows.Test.Faking 8 | 9 | alias ValueFlows.Process.Processes 10 | 11 | describe "track" do 12 | test "Returns EconomicEvents that are outputs" do 13 | user = fake_agent!() 14 | process = fake_process!(user) 15 | 16 | _input_events = 17 | some(3, fn -> 18 | fake_economic_event!(user, %{ 19 | input_of: process.id, 20 | action: "consume" 21 | }) 22 | end) 23 | 24 | output_events = 25 | some(5, fn -> 26 | fake_economic_event!(user, %{ 27 | output_of: process.id, 28 | action: "produce" 29 | }) 30 | end) 31 | 32 | assert {:ok, events} = Processes.track(process) 33 | 34 | ids = Enum.map(events, & &1.id) 35 | 36 | for %{id: id} <- output_events do 37 | assert id in ids 38 | end 39 | end 40 | end 41 | 42 | describe "trace" do 43 | test "Return EconomicEvents that are inputs" do 44 | user = fake_agent!() 45 | process = fake_process!(user) 46 | 47 | input_events = 48 | some(3, fn -> 49 | fake_economic_event!(user, %{ 50 | input_of: process.id, 51 | action: "consume" 52 | }) 53 | end) 54 | 55 | _output_events = 56 | some(5, fn -> 57 | fake_economic_event!(user, %{ 58 | output_of: process.id, 59 | action: "produce" 60 | }) 61 | end) 62 | 63 | assert {:ok, events} = Processes.trace(process) 64 | assert Enum.map(events, & &1.id) == Enum.map(input_events, & &1.id) 65 | end 66 | end 67 | 68 | describe "inputs" do 69 | test "return EconomicEvents that are inputs" do 70 | user = fake_agent!() 71 | process = fake_process!(user) 72 | 73 | input_events = 74 | some(3, fn -> 75 | fake_economic_event!(user, %{ 76 | input_of: process.id, 77 | action: "consume" 78 | }) 79 | end) 80 | 81 | _output_events = 82 | some(5, fn -> 83 | 
fake_economic_event!(user, %{ 84 | output_of: process.id, 85 | action: "produce" 86 | }) 87 | end) 88 | 89 | assert {:ok, events} = Processes.inputs(process) 90 | assert Enum.map(events, & &1.id) == Enum.map(input_events, & &1.id) 91 | end 92 | 93 | test "return EconomicEvents that are inputs and with action consume" do 94 | user = fake_agent!() 95 | process = fake_process!(user) 96 | 97 | input_events = 98 | some(3, fn -> 99 | fake_economic_event!(user, %{ 100 | input_of: process.id, 101 | action: "consume" 102 | }) 103 | end) 104 | 105 | _other_input_events = 106 | some(5, fn -> 107 | fake_economic_event!(user, %{ 108 | input_of: process.id, 109 | action: "use" 110 | }) 111 | end) 112 | 113 | assert {:ok, events} = Processes.inputs(process, "consume") 114 | assert Enum.map(events, & &1.id) == Enum.map(input_events, & &1.id) 115 | end 116 | end 117 | 118 | describe "outputs" do 119 | test "return EconomicEvents that are outputs" do 120 | user = fake_agent!() 121 | process = fake_process!(user) 122 | 123 | _input_events = 124 | some(3, fn -> 125 | fake_economic_event!(user, %{ 126 | input_of: process.id, 127 | action: "consume" 128 | }) 129 | end) 130 | 131 | output_events = 132 | some(5, fn -> 133 | fake_economic_event!(user, %{ 134 | output_of: process.id, 135 | action: "produce" 136 | }) 137 | end) 138 | 139 | assert {:ok, events} = Processes.outputs(process) 140 | assert Enum.map(events, & &1.id) == Enum.map(output_events, & &1.id) 141 | end 142 | 143 | test "return EconomicEvents that are outputs and with action produce" do 144 | user = fake_agent!() 145 | process = fake_process!(user) 146 | 147 | _other_output_events = 148 | some(3, fn -> 149 | fake_economic_event!(user, %{ 150 | output_of: process.id, 151 | action: "raise" 152 | }) 153 | end) 154 | 155 | output_events = 156 | some(5, fn -> 157 | fake_economic_event!(user, %{ 158 | output_of: process.id, 159 | action: "produce" 160 | }) 161 | end) 162 | 163 | assert {:ok, events} = Processes.outputs(process, "produce") 164 | 165 | assert events |> Enum.map(& &1.id) |> Enum.sort() == 166 | output_events |> Enum.map(& &1.id) |> Enum.sort() 167 | end 168 | end 169 | end 170 | -------------------------------------------------------------------------------- /test/track_trace/resource_track_trace_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EconomicResource.EconomicResourcesTrackTraceTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | 8 | import Bonfire.Geolocate.Simulate 9 | 10 | import ValueFlows.Test.Faking 11 | 12 | alias ValueFlows.EconomicResource.EconomicResources 13 | 14 | describe "EconomicResources.track" do 15 | test "Returns a list of EconomicEvents affecting the resource that are inputs to Processes " do 16 | user = fake_agent!() 17 | resource = fake_economic_resource!(user) 18 | process = fake_process!(user) 19 | 20 | input_events = 21 | some(3, fn -> 22 | fake_economic_event!(user, %{ 23 | input_of: process.id, 24 | resource_inventoried_as: resource.id, 25 | action: "use" 26 | }) 27 | end) 28 | 29 | _output_events = 30 | some(5, fn -> 31 | fake_economic_event!(user, %{ 32 | output_of: process.id, 33 | resource_inventoried_as: resource.id, 34 | action: "produce" 35 | }) 36 | end) 37 | 38 | assert {:ok, events} = EconomicResources.track(resource) 39 | 40 | ids = Enum.map(events, & &1.id) 41 | 42 | for %{id: id} <- input_events do 43 | assert id in ids 44 | end 45 | end 46 | 
47 | test "Returns a list of transfer/move EconomicEvents with the resource defined as the resourceInventoriedAs" do 48 | user = fake_agent!() 49 | unit = maybe_fake_unit(user) 50 | 51 | resource = fake_economic_resource!(user, %{}, unit) 52 | 53 | input_events = 54 | some(3, fn -> 55 | fake_economic_event!( 56 | user, 57 | %{ 58 | resource_inventoried_as: resource.id, 59 | action: "transfer" 60 | }, 61 | unit 62 | ) 63 | end) 64 | 65 | assert {:ok, events} = EconomicResources.track(resource) 66 | 67 | ids = Enum.map(events, & &1.id) 68 | 69 | for %{id: id} <- input_events do 70 | assert id in ids 71 | end 72 | end 73 | end 74 | 75 | describe "EconomicResources.trace" do 76 | test "Returns a list of EconomicEvents affecting the resource that are outputs from Processes" do 77 | user = fake_agent!() 78 | resource = fake_economic_resource!(user) 79 | process = fake_process!(user) 80 | 81 | _input_events = 82 | some(3, fn -> 83 | fake_economic_event!(user, %{ 84 | input_of: process.id, 85 | to_resource_inventoried_as: resource.id, 86 | action: "use" 87 | }) 88 | end) 89 | 90 | output_events = 91 | some(5, fn -> 92 | fake_economic_event!(user, %{ 93 | output_of: process.id, 94 | to_resource_inventoried_as: resource.id, 95 | action: "produce" 96 | }) 97 | end) 98 | 99 | assert {:ok, trace_events} = EconomicResources.trace(resource) 100 | 101 | ids = Enum.map(trace_events, & &1.id) 102 | 103 | for %{id: id} <- output_events do 104 | assert id in ids 105 | end 106 | end 107 | 108 | test "Returns a list of transfer/move EconomicEvents with the resource defined as the toResourceInventoriedAs" do 109 | user = fake_agent!() 110 | unit = maybe_fake_unit(user) 111 | resource = fake_economic_resource!(user, %{}, unit) 112 | 113 | input_events = 114 | some(3, fn -> 115 | fake_economic_event!( 116 | user, 117 | %{ 118 | provider: user.id, 119 | receiver: user.id, 120 | to_resource_inventoried_as: resource.id, 121 | action: "transfer" 122 | }, 123 | unit 124 | ) 125 | end) 126 | 127 | assert {:ok, events} = EconomicResources.trace(resource) 128 | 129 | ids = Enum.map(events, & &1.id) 130 | 131 | for %{id: id} <- input_events do 132 | assert id in ids 133 | end 134 | end 135 | end 136 | end 137 | -------------------------------------------------------------------------------- /test/track_trace/track_and_trace_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.TrackAndTraceTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Common.Simulation 5 | 6 | import ValueFlows.Simulate 7 | 8 | # import Bonfire.Geolocate.Simulate 9 | 10 | # import ValueFlows.Test.Faking 11 | 12 | alias ValueFlows.EconomicEvent.EconomicEvents 13 | alias ValueFlows.EconomicResource.EconomicResources 14 | 15 | describe "Track" do 16 | test "starting from a resource we track the nested chain until the second level" do 17 | user = fake_agent!() 18 | unit = maybe_fake_unit(user) 19 | resource = fake_economic_resource!(user, %{}, unit) 20 | process = fake_process!(user) 21 | 22 | _input_events = 23 | some(3, fn -> 24 | fake_economic_event!( 25 | user, 26 | %{ 27 | input_of: process.id, 28 | resource_inventoried_as: resource.id, 29 | action: "use" 30 | }, 31 | unit 32 | ) 33 | end) 34 | 35 | assert {:ok, input_events} = EconomicResources.track(resource) 36 | 37 | for event <- input_events do 38 | assert {:ok, chain} = ValueFlows.EconomicEvent.Track.track(event) 39 | if length(chain) > 0, do: assert(process.id in Enum.map(chain, & &1.id)) 40 | end 41 | 
end 42 | end 43 | 44 | describe "Trace" do 45 | test "starting from a resource we trace the nested chain until the third level" do 46 | user = fake_agent!() 47 | unit = maybe_fake_unit(user) 48 | resource = fake_economic_resource!(user, %{}, unit) 49 | process = fake_process!(user) 50 | 51 | output_events = 52 | some(3, fn -> 53 | fake_economic_event!( 54 | user, 55 | %{ 56 | output_of: process.id, 57 | resource_inventoried_as: resource.id, 58 | action: "transfer" 59 | }, 60 | unit 61 | ) 62 | end) 63 | 64 | last_resource = List.last(output_events) |> Map.get(:to_resource_inventoried_as) 65 | 66 | assert {:ok, output_events} = EconomicResources.trace(last_resource) 67 | 68 | for event <- output_events do 69 | assert {:ok, chain} = ValueFlows.EconomicEvent.Trace.trace(event) 70 | if length(chain) > 0, do: assert(process.id in Enum.map(chain, & &1.id)) 71 | end 72 | end 73 | 74 | test "return an economic event that is not part of a process from tracing a resource" do 75 | user = fake_agent!() 76 | resource = fake_economic_resource!(user) 77 | 78 | _event = 79 | fake_economic_event!(user, %{ 80 | resource_inventoried_as: resource.id, 81 | action: "produce" 82 | }) 83 | 84 | assert {:ok, events} = EconomicResources.trace(resource) 85 | # IO.inspect(events) 86 | for event <- events do 87 | assert {:ok, []} = ValueFlows.EconomicEvent.Trace.trace(event) 88 | end 89 | end 90 | end 91 | end 92 | -------------------------------------------------------------------------------- /test/value_calculation/events_value_calculation_test.exs: -------------------------------------------------------------------------------- 1 | defmodule ValueFlows.EventsValueCalculationTest do 2 | use Bonfire.ValueFlows.DataCase, async: true 3 | 4 | import Bonfire.Quantify.Simulate, only: [fake_unit!: 1] 5 | import ValueFlows.Simulate 6 | import ValueFlows.Test.Faking 7 | import ValueFlows.Observe.Simulate 8 | 9 | alias ValueFlows.EconomicEvent.EconomicEvents 10 | alias ValueFlows.Knowledge.Action.Actions 11 | 12 | alias ValueFlows.Observe.Observations 13 | 14 | alias Decimal, as: D 15 | 16 | describe "create a reciprocal event" do 17 | test "that has a matching action" do 18 | user = fake_agent!() 19 | action = action() 20 | 21 | calc = 22 | fake_value_calculation!(user, %{ 23 | action: action.id, 24 | formula: "(+ 1 effortQuantity)" 25 | }) 26 | 27 | event = fake_economic_event!(user, %{action: action.id}) 28 | 29 | assert {:ok, reciprocal} = EconomicEvents.one(calculated_using_id: calc.id) 30 | 31 | assert reciprocal = EconomicEvents.preload_all(reciprocal) 32 | assert reciprocal.action_id == calc.value_action_id 33 | 34 | assert reciprocal.resource_quantity.has_numerical_value == 35 | 1.0 + reciprocal.effort_quantity.has_numerical_value 36 | end 37 | 38 | test "effort quantity if action is work or use" do 39 | user = fake_agent!() 40 | action = action() 41 | 42 | assert {:ok, value_action} = 43 | ["use", "work"] 44 | |> Faker.Util.pick() 45 | |> Actions.action() 46 | 47 | calc = 48 | fake_value_calculation!(user, %{ 49 | action: action.id, 50 | value_action: value_action.id, 51 | formula: "(+ 1 resourceQuantity)" 52 | }) 53 | 54 | event = fake_economic_event!(user, %{action: action.id}) 55 | 56 | assert {:ok, reciprocal} = EconomicEvents.one(calculated_using_id: calc.id) 57 | 58 | assert reciprocal = EconomicEvents.preload_all(reciprocal) 59 | 60 | assert reciprocal.effort_quantity.has_numerical_value == 61 | D.to_float( 62 | D.add( 63 | D.from_float(1.0), 64 | 
D.from_float(reciprocal.resource_quantity.has_numerical_value) 65 | ) 66 | ) 67 | end 68 | 69 | test "use of quality" do 70 | user = fake_agent!() 71 | unit = fake_unit!(user) 72 | action = action() 73 | 74 | calc = 75 | fake_value_calculation!(user, %{ 76 | action: action.id, 77 | formula: "(* quality resourceQuantity 2)" 78 | }) 79 | 80 | resource = fake_economic_resource!(user, %{}, unit) 81 | 82 | phenon = fake_observable_phenomenon!(user) 83 | 84 | assert {:ok, observation} = 85 | Observations.create( 86 | user, 87 | observation( 88 | %{}, 89 | resource, 90 | fake_observable_property!(user), 91 | phenon 92 | ) 93 | ) 94 | 95 | event = 96 | fake_economic_event!( 97 | user, 98 | %{ 99 | action: action.id, 100 | resource_inventoried_as: resource.id 101 | }, 102 | unit 103 | ) 104 | 105 | assert {:ok, reciprocal} = EconomicEvents.one(calculated_using_id: calc.id) 106 | 107 | assert reciprocal = EconomicEvents.preload_all(reciprocal) 108 | 109 | assert reciprocal.resource_quantity.has_numerical_value == 110 | D.to_float( 111 | D.mult( 112 | D.from_float(phenon.formula_quantifier), 113 | D.mult( 114 | D.from_float(reciprocal.resource_quantity.has_numerical_value), 115 | D.new(2) 116 | ) 117 | ) 118 | ) 119 | end 120 | 121 | # not needed for current project 122 | @tag :skip 123 | test "side effects are computed correctly" do 124 | user = fake_agent!() 125 | action = action() 126 | calc = fake_value_calculation!(user, %{formula: "(* 0.5 effortQuantity)"}) 127 | event = fake_economic_event!(user, %{action: action.id}) 128 | 129 | assert false = "TODO" 130 | end 131 | end 132 | end 133 | -------------------------------------------------------------------------------- /tool-versions-to-env.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | # Script to read .tool-versions library versions into environment variables 6 | # in the form of _version= 7 | # credit to https://github.com/smartcontractkit/tool-versions-to-env-action 8 | 9 | # Create the functions 10 | 11 | print_help() { 12 | echo "Requires at least 1 argument" 13 | echo "Argument #1: Action options" 14 | echo " 1 = print to .env file" 15 | echo " Argument #2: Optional. The path to the .tool-versions file" 16 | echo " Argument #3: Optional. The path to the .env file to create" 17 | echo " 2 = sent to github action output" 18 | echo " Argument #2: Optional. The path to the .tool-versions file" 19 | echo " 3 = only print given variable to stdout" 20 | echo " Argument #2: Required. The tool version to output to stdout" 21 | echo " Argument #3: Optional. The path to the .tool-versions file" 22 | echo "" 23 | echo "Example for 1 or 2: tool-versions-to-env.sh 1" 24 | echo "Example for 3: tool-versions-to-env.sh 3 golang" 25 | } 26 | 27 | # First argument is a boolean of 0 = false or 1 = true to echo the variable. 28 | # Second argument is the string to echo to std out. 
29 | to_echo() { 30 | if [ "$1" -eq 1 ]; then 31 | echo "$2" 32 | fi 33 | } 34 | 35 | # First argument is the output option, 36 | # 1 to .env 37 | # 2 to github action output 38 | # 3 none 39 | # Second argument is the .tool-versions location 40 | # Third argument is the .env location 41 | # Fourth argument is whether it is safe to echo to stdout 42 | read_tool_versions_write_to_env() { 43 | local -r how_to_echo="$1" 44 | local -r tool_versions_file="$2" 45 | local -r env_file="$3" 46 | local -r safe_to_echo="$4" 47 | 48 | # clear the env file before writing to it later 49 | if [ "$how_to_echo" -eq 1 ]; then 50 | echo "" >"${env_file}" 51 | fi 52 | # loop over each line of the .tool-versions file 53 | while read -r line; do 54 | to_echo "$safe_to_echo" "Original line: $line" 55 | 56 | # split the line into a bash array using the default space delimiter 57 | IFS=" " read -r -a lineArray <<<"$line" 58 | 59 | # get the key and value from the array, set the key to all lowercase 60 | key="${lineArray[0],,}" 61 | value="${lineArray[1]}" 62 | 63 | # ignore comments, comments always start with # 64 | if [[ ${key:0:1} != "#" ]]; then 65 | full_key="${key}_version" 66 | to_echo "${safe_to_echo}" "Parsed line: ${full_key}=${value}" 67 | # echo the variable to the .env file 68 | if [ "$how_to_echo" -eq 1 ]; then 69 | echo "${full_key^^}=${value}" >>"${env_file}" 70 | elif [ "$how_to_echo" -eq 2 ]; then 71 | echo "${full_key^^}=$value" >>"$GITHUB_ENV" 72 | elif [ "$how_to_echo" -eq 3 ]; then 73 | # echo "$value" 74 | # break 75 | echo " ${full_key^^}=${value}" 76 | fi 77 | fi 78 | done <"$tool_versions_file" 79 | } 80 | 81 | # Run the code 82 | 83 | if [ $# -eq 0 ]; then 84 | print_help 85 | exit 1 86 | fi 87 | 88 | # Action option 89 | ACTION_OPTION=${1} 90 | # path to the .tool-versions file 91 | TOOL_VERSIONS_FILE=${2:-"./.tool-versions"} 92 | # path to the .env file 93 | ENV_FILE=${3:-"./.env"} 94 | 95 | if [ "$ACTION_OPTION" -eq 1 ]; then 96 | # print to .env 97 | read_tool_versions_write_to_env 1 "$TOOL_VERSIONS_FILE" "$ENV_FILE" 1 98 | elif [ "$ACTION_OPTION" -eq 2 ]; then 99 | # print to github action 100 | read_tool_versions_write_to_env 2 "$TOOL_VERSIONS_FILE" "$ENV_FILE" 1 101 | elif [ "$ACTION_OPTION" -eq 3 ]; then 102 | TOOL_VERSIONS_FILE=${3:-"./.tool-versions"} 103 | # print single variable to stdout 104 | read_tool_versions_write_to_env 3 "$TOOL_VERSIONS_FILE" "" 0 105 | else 106 | echo "First argument was not of option 1, 2, or 3" 107 | fi --------------------------------------------------------------------------------
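A quick illustration of what tool-versions-to-env.sh does with option 1 (a sketch only; the tool names and versions below are hypothetical and not taken from this repository's actual .tool-versions file):

    # given a .tool-versions containing:
    elixir 1.14.5
    erlang 25.3

    # running ./tool-versions-to-env.sh 1 writes a .env containing:
    ELIXIR_VERSION=1.14.5
    ERLANG_VERSION=25.3

Each tool name is lowercased, suffixed with _version, and then upper-cased when written out; with option 2 the same KEY=value pairs are appended to $GITHUB_ENV instead of a .env file.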