# --- .formatter.exs ---
# Used by "mix format"
[
  inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]
]

# --- lib/hl7/field_grammar.ex ---
defmodule HL7.FieldGrammar do
  @moduledoc deprecated: "Use `HL7` instead"
  @moduledoc "Represents an HL7 path."

  @doc deprecated: "Use `HL7.Path.new/1` instead"
  def to_indices(schema) do
    HL7.Path.new(schema)
  end
end

# --- bump_version.exs ---
#! /usr/bin/env elixir
# Reads the VERSION file, bumps the patch number, and writes it back.

new_version =
  "VERSION"
  |> File.read!()
  |> String.trim()
  |> Version.parse!()
  |> then(fn ver -> "#{ver.major}.#{ver.minor}.#{ver.patch + 1}" end)

# FIX: File.write/2 returns :ok | {:error, reason}; the original ignored the
# result and printed success even when the write failed. File.write!/2 raises
# on failure so the message below is only printed when VERSION was updated.
File.write!("VERSION", new_version)
IO.puts("Version bumped to #{new_version}")

# --- test/hl7/examples_test.exs ---
defmodule HL7.ExamplesTest do
  use ExUnit.Case
  require Logger
  doctest HL7.Examples

  test "The nist_syndromic_hl7 example is a valid message" do
    msg =
      HL7.Examples.nist_syndromic_hl7()
      |> HL7.Message.new()

    assert %HL7.Message{} = msg
  end
end

# --- lib/hl7/invalid_header.ex ---
defmodule HL7.InvalidHeader do
  @moduledoc """
  Contains information concerning any failed attempt to parse an HL7 MSH segment.
  """

  @type t :: %HL7.InvalidHeader{
          raw: nil | String.t(),
          reason: nil | atom()
        }

  defstruct raw: nil,
            reason: nil
end

# --- test/test_helper.exs ---
ExUnit.configure(formatters: [JUnitFormatter, ExUnit.CLIFormatter])
ExUnit.start()

defmodule HL7.TempFileCase do
  # Case template that gives each test a scratch directory, removed on exit.
  use ExUnit.CaseTemplate

  setup do
    File.mkdir_p!(tmp_path())
    on_exit(fn -> File.rm_rf(tmp_path()) end)
    :ok
  end

  def tmp_path() do
    Path.expand("../../tmp", __DIR__)
  end

  def tmp_path(extra) do
    Path.join(tmp_path(), extra)
  end
end

# --- lib/hl7/invalid_message.ex ---
defmodule HL7.InvalidMessage do
  @moduledoc deprecated: "Use errors from `HL7` instead"

  @moduledoc """
  Contains information concerning any failed attempt
  to parse an HL7 message, generally MSH-related
  """

  @type t :: %HL7.InvalidMessage{
          raw: nil | String.t(),
          header: nil | HL7.InvalidHeader.t(),
          created_at: nil | DateTime.t(),
          reason: nil | atom()
        }

  defstruct raw: nil,
            header: nil,
            created_at: nil,
            reason: nil
end
# --- lib/hl7/selection.ex ---
defmodule HL7.Selection do
  @moduledoc false
  require Logger

  # Internal bookkeeping struct used while selecting and grouping segments.
  @type t :: %HL7.Selection{
          segments: list(),
          prefix: list(),
          suffix: list(),
          data: map(),
          id: non_neg_integer(),
          complete: boolean(),
          broken: boolean(),
          valid: boolean(),
          fed: boolean()
        }

  defstruct segments: [],
            prefix: [],
            suffix: [],
            data: %{index: 1},
            id: 0,
            complete: false,
            broken: false,
            valid: false,
            fed: false
end

# --- lib/hl7/raw_message.ex ---
defmodule HL7.RawMessage do
  require Logger

  @moduledoc """
  Contains the raw text of an HL7 message alongside parsed header metadata from the MSH segment.

  Use `HL7.Message.raw/1` to generate the `t:HL7.RawMessage.t/0` struct
  """

  @moduledoc deprecated: "Use `HL7.Header` instead"

  @type t :: %HL7.RawMessage{
          raw: nil | binary(),
          header: nil | HL7.Header.t()
        }

  defstruct raw: nil,
            header: nil

  defimpl String.Chars, for: HL7.RawMessage do
    require Logger

    # Renders the message as its original wire text.
    def to_string(%HL7.RawMessage{raw: raw_text}) do
      raw_text
    end
  end
end

# --- lib/hl7/invalid_grammar.ex ---
defmodule HL7.InvalidGrammar do
  @moduledoc false

  # Contains information concerning any failed attempt to parse Field or
  # Segment Grammar Notations.

  @type invalid_token :: %HL7.InvalidGrammar{
          invalid_token: String.t(),
          schema: String.t(),
          reason: :invalid_token
        }

  # FIX: no_required_segments/0 (below) builds this struct with both
  # invalid_token and schema left as nil, so the type must say nil here —
  # the previous String.t() spec made every value produced by
  # no_required_segments/0 violate its own declared type.
  @type no_required_segments :: %HL7.InvalidGrammar{
          invalid_token: nil,
          schema: nil,
          reason: :no_required_segments
        }

  @type t :: invalid_token | no_required_segments

  defstruct invalid_token: nil, schema: nil, reason: nil

  # Builds an error struct for an unrecognized token within a grammar string.
  @spec invalid_token(String.t(), String.t()) :: invalid_token()
  def invalid_token(token, schema) when is_binary(token) and is_binary(schema) do
    %HL7.InvalidGrammar{
      invalid_token: token,
      schema: schema,
      reason: :invalid_token
    }
  end

  # Builds an error struct for a grammar containing no required segments.
  @spec no_required_segments() :: no_required_segments()
  def no_required_segments do
    %HL7.InvalidGrammar{
      reason: :no_required_segments
    }
  end
end
# --- parsec_source/path_parser.ex.exs ---
# Source of truth for the generated lib/hl7/path_parser.ex; rebuild via the
# "parsec.compile" mix alias. The parsec markers below delimit the section
# that nimble_parsec inlines into the generated module.
defmodule HL7.PathParser do
  # parsec:HL7.PathParser

  import NimbleParsec

  # An index is either a positive integer or the "*" wildcard.
  positive_int = integer(min: 1)
  index = choice([positive_int, string("*")])

  # "[n]" / "[*]" — brackets are consumed, only the index survives.
  bracketed_num =
    ignore(string("["))
    |> concat(index)
    |> ignore(string("]"))

  # A bracketed index that may be absent entirely.
  defaulted_num = choice([bracketed_num, empty()])

  dot = ignore(string("."))
  dash = ignore(string("-"))

  # Trailing "!" marks truncation.
  bang = string("!") |> replace(true) |> tag(:truncate)

  field = optional(dash) |> concat(index) |> tag(:field)

  alpha = ascii_string([?A..?Z], 1)
  alpha_num = choice([ascii_string([?0..?9], 1), alpha])

  # Three-character segment id: a letter followed by two alphanumerics.
  segment_id =
    alpha
    |> concat(alpha_num)
    |> concat(alpha_num)
    |> reduce({Enum, :join, [""]})
    |> tag(:segment)

  segment_num = defaulted_num |> tag(:segment_number)

  segment_header = optional(segment_id |> concat(segment_num))

  repeat_num = defaulted_num |> tag(:repetition)

  component = index |> tag(:component)

  subcomponent = index |> tag(:subcomponent)

  # ".c" optionally followed by ".s"
  component_part =
    dot
    |> concat(component)
    |> optional(concat(dot, subcomponent))

  defparsec(
    :parse,
    segment_header
    |> optional(field)
    |> optional(repeat_num)
    |> optional(component_part)
    |> optional(bang)
    |> eos()
  )

  # parsec:HL7.PathParser
end
# --- lib/hl7/mllp_stream.ex ---
defmodule HL7.MLLPStream do
  @moduledoc false
  require Logger

  # MLLP frames a message as <VT> payload <FS><CR>.
  # ^K - VT (Vertical Tab) - 0x0B
  @sb "\v"
  # ^\ - FS (File Separator)
  @eb <<0x1C>>
  # ^M - CR (Carriage Return) - 0x0D
  @cr "\r"
  @ending @eb <> @cr

  @spec get_prefix() :: String.t()
  def get_prefix(), do: @sb

  @spec get_suffix() :: String.t()
  def get_suffix(), do: @ending

  # Converts a stream of arbitrary binary chunks into a stream of complete
  # MLLP payloads, buffering partial frames between chunks.
  @spec raw_to_messages(Enumerable.t()) :: Enumerable.t()
  def raw_to_messages(input_stream) do
    input_stream
    |> Stream.chunk_while("", &chunker/2, &after_chunking/1)
    |> Stream.concat()
  end

  @spec chunker(String.t(), String.t()) :: {:cont, String.t()} | {:cont, list(), String.t()}
  defp chunker(element, acc) when is_binary(element) do
    buffered = acc <> element

    case String.split(buffered, @ending) do
      # No frame terminator seen yet — keep buffering.
      [incomplete] -> {:cont, incomplete}
      frames -> to_list_and_remnant(frames)
    end
  end

  # The last split piece is an unterminated remnant; everything before it is
  # a candidate frame whose <VT> prefix still needs stripping.
  @spec to_list_and_remnant(list()) :: {:cont, list(), String.t()}
  defp to_list_and_remnant(frames) do
    [remnant | completed_in_reverse] = Enum.reverse(frames)

    messages =
      completed_in_reverse
      |> Enum.map(&after_sb/1)
      |> Enum.filter(fn m -> is_binary(m) and m != "" end)
      |> Enum.reverse()

    {:cont, messages, remnant}
  end

  # Drops everything up to and including the start byte; nil if absent.
  @spec after_sb(String.t()) :: nil | String.t()
  defp after_sb(text) do
    case String.split(text, @sb, parts: 2) do
      [_no_start_byte] -> nil
      [_before, message] -> message
    end
  end

  # Any trailing unterminated remnant is discarded at stream end.
  @spec after_chunking(any()) :: {:cont, []}
  defp after_chunking(_acc), do: {:cont, []}
end

# --- lib/hl7/lexers/default.ex ---
defmodule HL7.Lexers.Default do
  @moduledoc false

  # Terminator markers interleaved with the string tokens; larger number ==
  # higher level of the HL7 hierarchy.
  @carriage_return 4
  @field 3
  @repetition 2
  @component 1
  @sub_component 0

  # Hot path: messages using the standard separators (with or without the
  # "#" truncation character) are lexed directly; anything else falls back
  # to the dynamic lexer.
  def tokenize(<<"MSH|^~\\&#", rest::binary>> = text) do
    tokenize(rest, text, 9, 0, ["^~\\&#", @field, "|", @field, "MSH"])
  end

  def tokenize(<<"MSH|^~\\&", rest::binary>> = text) do
    tokenize(rest, text, 8, 0, ["^~\\&", @field, "|", @field, "MSH"])
  end

  def tokenize(text) do
    HL7.Lexers.Dynamic.tokenize(text)
  end

  # Each clause below consumes one separator and emits its marker; `skip` is
  # the byte offset of the current token start and `len` its running length.
  defp tokenize(<<"|", rest::binary>>, original, skip, len, tokens) do
    tokenize_terminator(rest, original, skip, len, tokens, @field)
  end

  defp tokenize(<<"~", rest::binary>>, original, skip, len, tokens) do
    tokenize_terminator(rest, original, skip, len, tokens, @repetition)
  end

  defp tokenize(<<"^", rest::binary>>, original, skip, len, tokens) do
    tokenize_terminator(rest, original, skip, len, tokens, @component)
  end

  defp tokenize(<<"&", rest::binary>>, original, skip, len, tokens) do
    tokenize_terminator(rest, original, skip, len, tokens, @sub_component)
  end

  defp tokenize(<<"\r", rest::binary>>, original, skip, len, tokens) do
    tokenize_terminator(rest, original, skip, len, tokens, @carriage_return)
  end

  # Bare newlines are treated the same as carriage returns.
  defp tokenize(<<"\n", rest::binary>>, original, skip, len, tokens) do
    tokenize_terminator(rest, original, skip, len, tokens, @carriage_return)
  end

  defp tokenize(<<_char::binary-size(1), rest::binary>>, original, skip, len, tokens) do
    tokenize(rest, original, skip, len + 1, tokens)
  end

  defp tokenize("", _original, _skip, 0, tokens) do
    Enum.reverse(tokens)
  end

  defp tokenize("", original, skip, len, tokens) do
    string = binary_part(original, skip, len)
    Enum.reverse([string | tokens])
  end

  # Emits the pending token (if non-empty) followed by the terminator marker.
  # binary_part/3 creates sub-binary references into the original message.
  defp tokenize_terminator(text, original, skip, 0, tokens, terminator) do
    tokenize(text, original, skip + 1, 0, [terminator | tokens])
  end

  defp tokenize_terminator(text, original, skip, len, tokens, terminator) do
    string = binary_part(original, skip, len)
    tokenize(text, original, skip + len + 1, 0, [terminator, string | tokens])
  end

  @compile {:inline, tokenize: 5, tokenize_terminator: 6}
end
# --- lib/hl7/lexers/default_with_copy.ex ---
defmodule HL7.Lexers.DefaultWithCopy do
  @moduledoc false

  # Same lexer as HL7.Lexers.Default, but every emitted token is copied out
  # of the original binary (:binary.copy/1) so the large source message can
  # be garbage-collected independently of the tokens.
  @carriage_return 4
  @field 3
  @repetition 2
  @component 1
  @sub_component 0

  def tokenize(<<"MSH|^~\\&#", rest::binary>> = text) do
    tokenize(rest, text, 9, 0, ["^~\\&#", @field, "|", @field, "MSH"])
  end

  def tokenize(<<"MSH|^~\\&", rest::binary>> = text) do
    tokenize(rest, text, 8, 0, ["^~\\&", @field, "|", @field, "MSH"])
  end

  def tokenize(text) do
    HL7.Lexers.DynamicWithCopy.tokenize(text)
  end

  defp tokenize(<<"|", rest::binary>>, original, skip, len, acc) do
    tokenize_terminator(rest, original, skip, len, acc, @field)
  end

  defp tokenize(<<"~", rest::binary>>, original, skip, len, acc) do
    tokenize_terminator(rest, original, skip, len, acc, @repetition)
  end

  defp tokenize(<<"^", rest::binary>>, original, skip, len, acc) do
    tokenize_terminator(rest, original, skip, len, acc, @component)
  end

  defp tokenize(<<"&", rest::binary>>, original, skip, len, acc) do
    tokenize_terminator(rest, original, skip, len, acc, @sub_component)
  end

  defp tokenize(<<"\r", rest::binary>>, original, skip, len, acc) do
    tokenize_terminator(rest, original, skip, len, acc, @carriage_return)
  end

  defp tokenize(<<"\n", rest::binary>>, original, skip, len, acc) do
    tokenize_terminator(rest, original, skip, len, acc, @carriage_return)
  end

  defp tokenize(<<_char::binary-size(1), rest::binary>>, original, skip, len, acc) do
    tokenize(rest, original, skip, len + 1, acc)
  end

  defp tokenize("", _original, _skip, 0, acc) do
    Enum.reverse(acc)
  end

  defp tokenize("", original, skip, len, acc) do
    string = binary_part(original, skip, len) |> :binary.copy()
    Enum.reverse([string | acc])
  end

  defp tokenize_terminator(text, original, skip, 0, acc, terminator) do
    tokenize(text, original, skip + 1, 0, [terminator | acc])
  end

  defp tokenize_terminator(text, original, skip, len, acc, terminator) do
    string = binary_part(original, skip, len) |> :binary.copy()
    tokenize(text, original, skip + len + 1, 0, [terminator, string | acc])
  end

  @compile {:inline, tokenize: 5, tokenize_terminator: 6}
end

# --- lib/hl7/list_parser.ex ---
defmodule HL7.Parser do
  @moduledoc false

  @carriage_return 4
  @field 3
  @repetition 2
  @component 1
  @sub_component 0

  # Parses HL7 text into nested lists (segments > fields > repetitions >
  # components > subcomponents). `separators` nil selects the fast default
  # lexers; `copy` selects the binary-copying variants.
  def parse(text, nil, copy) do
    case copy do
      true -> HL7.Lexers.DefaultWithCopy.tokenize(text)
      false -> HL7.Lexers.Default.tokenize(text)
    end
    |> to_lists()
  end

  def parse(text, _separators, copy) do
    case copy do
      true -> HL7.Lexers.DynamicWithCopy.tokenize(text)
      false -> HL7.Lexers.Dynamic.tokenize(text)
    end
    |> to_lists()
  end

  # Each level splits the token stream on its terminator marker and
  # collapses singleton chunks back to plain strings.
  defp to_lists(tokens) do
    split_by(tokens, @carriage_return)
    |> Enum.reject(&(&1 == []))
    |> Enum.map(&to_segment/1)
  end

  defp to_segment(tokens) do
    split_by(tokens, @field)
    |> Enum.map(&to_field/1)
  end

  defp to_field([]) do
    ""
  end

  defp to_field([text]) when is_binary(text) do
    text
  end

  defp to_field(tokens) do
    split_by(tokens, @repetition)
    |> Enum.map(&to_repetition/1)
  end

  defp to_repetition([]) do
    ""
  end

  defp to_repetition([token]) when is_binary(token) do
    token
  end

  defp to_repetition(tokens) do
    split_by(tokens, @component)
    |> Enum.map(&to_component/1)
  end

  defp to_component([]) do
    ""
  end

  defp to_component([token]) when is_binary(token) do
    token
  end

  defp to_component(tokens) do
    split_by(tokens, @sub_component)
    |> Enum.map(&to_sub_component/1)
  end

  defp to_sub_component([]) do
    ""
  end

  defp to_sub_component([token]) when is_binary(token) do
    token
  end

  defp split_by(tokens, delimiter) do
    split_by(tokens, delimiter, [], [])
  end

  defp split_by([], _delimiter, buffer, result) do
    Enum.reverse([Enum.reverse(buffer) | result])
  end

  defp split_by([delimiter | rest], delimiter, buffer, result) do
    split_by(rest, delimiter, [], [Enum.reverse(buffer) | result])
  end

  defp split_by([token | rest], delimiter, buffer, result) do
    split_by(rest, delimiter, [token | buffer], result)
  end

  @compile {:inline,
            split_by: 2,
            split_by: 4,
            to_lists: 1,
            to_segment: 1,
            to_field: 1,
            to_repetition: 1,
            to_component: 1,
            to_sub_component: 1}
end

# --- lib/hl7/separators.ex ---
defmodule HL7.Separators do
  require Logger

  @moduledoc """
  Contains HL7 delimiter information (optionally specified by the end system) used to parse or generate HL7 messages.
  """

  @moduledoc deprecated: "Use `HL7` instead"

  # default HL7 separators
  # |^~\&#

  @type t :: %HL7.Separators{
          field: binary(),
          component: binary(),
          field_repeat: binary(),
          escape_char: binary(),
          subcomponent: binary(),
          encoding_characters: binary(),
          delimiter_check: [binary()],
          truncation_char: binary()
        }

  defstruct field: "|",
            component: "^",
            field_repeat: "~",
            escape_char: "\\",
            subcomponent: "&",
            encoding_characters: "^~\\&",
            delimiter_check: ["&", "^", "~"],
            truncation_char: ""

  @spec new(String.t()) :: HL7.Separators.t()
  def new(
        <<"MSH", field_separator::binary-size(1), encoding_characters::binary-size(4),
          truncation_char::binary-size(1), field_separator::binary-size(1),
          _tail::binary>> = _raw_text
      )
      when truncation_char != field_separator do
    new(field_separator, encoding_characters <> truncation_char)
  end

  def new(
        <<"MSH", field_separator::binary-size(1), encoding_characters::binary-size(4),
          field_separator::binary-size(1), _tail::binary>> = _raw_text
      ) do
    new(field_separator, encoding_characters)
  end

  # fallback to defaults if incorrect or missing
  def new(_) do
    %HL7.Separators{}
  end

  @spec new(binary(), binary()) :: HL7.Separators.t()
  def new("|", <<"^~\\&", truncation_char::binary>> = encoding_chars) do
    %HL7.Separators{truncation_char: truncation_char, encoding_characters: encoding_chars}
  end

  # FIX/RECONSTRUCTION: the binary match patterns of this clause were lost
  # to markup escaping in the dump (`<<...>>` collapsed to `<>`). Rebuilt
  # from the struct fields assigned in the body: one 1-byte field separator,
  # then the four 1-byte encoding characters followed by an optional
  # truncation character. TODO(review): confirm against upstream source.
  def new(
        <<field_separator::binary-size(1)>>,
        <<component::binary-size(1), field_repeat::binary-size(1), escape_char::binary-size(1),
          subcomponent::binary-size(1), truncation_char::binary>> = encoding_characters
      ) do
    %HL7.Separators{
      field: field_separator,
      component: component,
      field_repeat: field_repeat,
      escape_char: escape_char,
      subcomponent: subcomponent,
      delimiter_check: [subcomponent, component, field_repeat],
      encoding_characters: encoding_characters,
      truncation_char: truncation_char
    }
  end

  # fallback to defaults if incorrect or missing
  def new(_, _) do
    %HL7.Separators{}
  end
end
# --- mix.exs ---
defmodule HL7.MixProject do
  use Mix.Project

  def project do
    [
      app: :elixir_hl7,
      version: String.trim(File.read!("./VERSION")),
      description: "An Elixir library for working with HL7 v2.x healthcare data",
      source_url: github_link(),
      package: package(),
      elixir: "~> 1.18",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      dialyzer: [
        flags: [
          :error_handling,
          :underspecs,
          :unmatched_returns | conditional_dialyzer_flags(System.otp_release())
        ]
      ],
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ],
      aliases: aliases(),

      # Docs
      docs: [
        api_reference: false,
        extras: ["main.md"],
        main: "main",
        groups_for_modules: [
          Deprecated: [
            HL7.FieldGrammar,
            HL7.InvalidGrammar,
            HL7.InvalidMessage,
            HL7.Message,
            HL7.Parser,
            HL7.PathParser,
            HL7.Query,
            HL7.RawMessage,
            HL7.Segment,
            HL7.Separators
          ]
        ],
        nest_modules_by_prefix: ["HL7"]
      ]
    ]
  end

  # Regenerates lib/hl7/path_parser.ex from the parsec source file.
  defp aliases() do
    [
      "parsec.compile": [
        "nimble_parsec.compile parsec_source/path_parser.ex.exs -o lib/hl7/path_parser.ex"
      ]
    ]
  end

  # :missing_return / :extra_return flags require OTP 25+.
  defp conditional_dialyzer_flags(otp_release) do
    case String.to_integer(otp_release) do
      x when x < 25 ->
        []

      _ ->
        [:missing_return, :extra_return]
    end
  end

  defp github_link() do
    "https://github.com/HCA-Healthcare/elixir-hl7"
  end

  defp package() do
    [
      name: "elixir_hl7",
      files: ~w(lib .formatter.exs mix.exs benchmark.exs README* LICENSE* VERSION*),
      licenses: ["Apache 2.0"],
      links: %{"GitHub" => github_link()},
      maintainers: ["Scott Southworth", "Bryan Hunter"]
    ]
  end

  defp deps do
    [
      {:benchee, "~> 1.1.0", only: :dev},
      {:dialyxir, "~> 1.4.1", only: [:dev, :test], runtime: false},
      {:ex_doc, "~> 0.38.2", only: [:dev, :test], runtime: false},
      {:junit_formatter, "~> 3.3.1", only: :test},
      {:propcheck, "~> 1.4.1", only: [:test, :dev]},
      {:nimble_parsec, "~> 1.4.0", only: [:test, :dev], runtime: false},
      {:excoveralls, "~> 0.18", only: :test}
    ]
  end
end
# --- lib/hl7/split_stream.ex ---
# NOTE(review): this module is intentionally commented out in the repository
# (retained dead code — a generalized prefix/suffix splitter superseding
# HL7.MLLPStream). Kept verbatim, commented, as found.
#
# defmodule HL7.SplitStream do
#   @moduledoc false
#
#   require Logger
#
#   @type prefix :: String.t() | Regex.t() | nil
#   @type suffix :: String.t() | Regex.t()
#
#   @spec raw_to_messages(Enumerable.t(), prefix(), suffix()) :: Enumerable.t()
#   def raw_to_messages(input_stream, prefix, suffix) do
#     chunker = get_chunker(prefix, suffix)
#     Stream.chunk_while(input_stream, "", chunker, &after_chunking/1) |> Stream.concat()
#   end
#
#   @spec get_after_prefix(prefix()) :: function()
#   defp get_after_prefix(nil) do
#     fn text -> text end
#   end
#
#   defp get_after_prefix(prefix) when is_binary(prefix) do
#     fn text ->
#       chunks = text |> String.split(prefix, parts: 2)
#
#       case chunks do
#         [_chunk] -> nil
#         [_sb, msg] -> msg
#       end
#     end
#   end
#
#   defp get_after_prefix(%Regex{} = prefix) do
#     fn text ->
#       chunks = Regex.split(prefix, text, parts: 2)
#
#       case chunks do
#         [_chunk] -> nil
#         [_sb, msg] -> msg
#       end
#     end
#   end
#
#   @spec get_split_on_suffix(suffix()) :: (binary() -> [binary()])
#   defp get_split_on_suffix(suffix) when is_binary(suffix) do
#     fn text -> String.split(text, suffix) end
#   end
#
#   defp get_split_on_suffix(%Regex{} = suffix) do
#     fn text -> Regex.split(suffix, text, trim: true) end
#   end
#
#   @spec get_chunker(String.t(), Regex.t()) :: function()
#   defp get_chunker(prefix, suffix) do
#     to_list_and_remnant = get_to_list_and_remnant(prefix)
#     split_on_suffix = get_split_on_suffix(suffix)
#
#     fn
#       element, acc when is_binary(element) ->
#         # {:cont, chunk, acc} | {:cont, acc} | {:halt, acc})
#
#         text = acc <> element
#         potential_msg_list = split_on_suffix.(text)
#
#         case potential_msg_list do
#           [not_found] -> {:cont, not_found}
#           _ -> to_list_and_remnant.(potential_msg_list)
#         end
#
#       _element, _acc ->
#         raise(ArgumentError, message: "all elements in an HL7 stream must be binary")
#     end
#   end
#
#   @spec get_to_list_and_remnant(prefix()) :: function()
#   defp get_to_list_and_remnant(prefix) do
#     after_prefix = get_after_prefix(prefix)
#
#     fn potential_messages ->
#       [remnant | reverse_msgs] = potential_messages |> Enum.reverse()
#
#       msgs =
#         reverse_msgs
#         |> Enum.map(fn m -> after_prefix.(m) end)
#         |> Enum.filter(fn m -> is_binary(m) and m != "" end)
#         |> Enum.reverse()
#
#       {:cont, msgs, remnant}
#     end
#   end
#
#   defp after_chunking(_acc) do
#     {:cont, []}
#   end
# end
# --- test/hl7/path_test.exs ---
defmodule HL7.PathTest do
  use ExUnit.Case
  doctest HL7.Path
  import HL7, only: :sigils

  test "creates an Path sigil" do
    assert match?(%HL7.Path{}, ~p"OBX")
  end

  test "notes the segment name and default segment number" do
    result = ~p"OBX"
    assert 1 == result.segment_number
    assert nil == result.field
    refute result.truncate
    assert "OBX" == result.segment
  end

  test "notes the segment name and specific segment number" do
    result = ~p"OBX[3]"
    assert 3 == result.segment_number
    assert nil == result.field
    assert "OBX" == result.segment
  end

  test "notes the segment name with a wildcard to select all of them" do
    result = ~p"OBX[*]"
    assert "*" == result.segment_number
    assert nil == result.field
    assert "OBX" == result.segment
  end

  test "notes the selected field of the given segment with the default repetition" do
    result = ~p"OBX-5"
    assert 5 == result.field
    assert 1 == result.repetition
    assert 1 == result.segment_number
    assert "OBX" == result.segment
  end

  test "notes a specific repetition" do
    result = ~p"OBX-5[2]"
    assert 5 == result.field
    assert 2 == result.repetition
    assert 1 == result.segment_number
    assert "OBX" == result.segment
  end

  test "prevents a repetition without a field" do
    assert_raise ArgumentError, fn -> HL7.Path.new("[3]") end
  end

  test "notes a wildcard repetition" do
    result = ~p"OBX-5[*]"
    assert 5 == result.field
    assert "*" == result.repetition
    assert 1 == result.segment_number
    assert "OBX" == result.segment
  end

  test "notes a wildcard repetition with a wildcard segment selection" do
    result = ~p"OBX[*]-5[*]"
    assert 5 == result.field
    assert "*" == result.repetition
    assert "*" == result.segment_number
    assert "OBX" == result.segment
  end

  test "notes the selected field and component" do
    result = ~p"OBX-5.2"
    assert 5 == result.field
    assert 1 == result.repetition
    assert 2 == result.component
    assert 1 == result.segment_number
    assert "OBX" == result.segment
  end

  test "notes the selected field, component and subcomponent" do
    result = ~p"OBX-5.2.4"
    assert 5 == result.field
    assert 1 == result.repetition
    assert 2 == result.component
    assert 4 == result.subcomponent
    assert 1 == result.segment_number
    assert "OBX" == result.segment
  end

  test "notes truncation" do
    result = ~p"OBX[2]-5!"
    assert 5 == result.field
    assert 1 == result.repetition
    assert 2 == result.segment_number
    assert result.truncate
    assert "OBX" == result.segment
  end
end

# --- benchmark.exs ---
alias Benchee
alias HL7.Message
alias HL7.Examples

msg = Examples.wikipedia_sample_hl7()
alt = Examples.wikipedia_sample_hl7_alt_delimiters()
list = msg |> Message.to_list()

Benchee.run(%{
  "raw" => fn -> msg |> Message.raw() end,
  "new-map" => fn -> msg |> HL7.new!() end,
  "new" => fn -> msg |> Message.new() end,
  "new-copy" => fn -> msg |> Message.new(%{copy: true}) end,
  "new-alt" => fn -> alt |> Message.new() end,
  "round-trip" => fn -> msg |> Message.new() |> to_string() end
})

# raw = parsed header info,
# new = fully parsed with default delimiters
# new-alt = fully parsed with custom delimiters
# round-trip = fully parsed then back to text

# Historical results (Name / ips / average / deviation / median / 99th %):

# with elixir 1.7.x
# raw        48.60 K  20.58 μs ±38.76%  19 μs      42 μs
# new         8.73 K 114.54 μs ±11.34% 111 μs     168 μs
# round-trip  6.17 K 162.17 μs ±10.16% 159 μs  230.12 μs

# with erlang 22.0.7 and elixir 1.9.1
# raw        58.89 K  16.98 μs ±54.32%  16 μs      37 μs
# new         8.91 K 112.26 μs ±17.20% 108 μs     174 μs
# round-trip  6.49 K 154.11 μs ±12.37% 151 μs     225 μs

# with erlang 24 elixir 1.12
# raw        61.83 K  16.17 μs ±83.80%  15 μs      33 μs
# new         9.87 K 101.35 μs ±15.06%  97 μs     150 μs
# round-trip  7.32 K 136.69 μs  ±9.41% 134 μs     191 μs

# with erlang 25 elixir 1.14.1
# with tokenizer hot path for default parsing (~6x faster new!)
# raw        72.73 K  13.75 μs ±71.83%  12.91 μs  25.87 μs
# new        59.81 K  16.72 μs ±30.01%  15.00 μs  35.00 μs
# round-trip 21.31 K  46.92 μs ±18.10%  44.65 μs  78.95 μs
# new-alt    15.51 K  64.46 μs ±14.07%  62.27 μs 102.61 μs

# with binary copy option
# raw        73.43 K  13.62 μs ±62.71%  12.80 μs  25.18 μs
# new        55.93 K  17.88 μs ±29.34%  15.83 μs  36.76 μs
# new-copy   47.44 K  21.08 μs ±30.22%  19.14 μs  41.36 μs
# round-trip 20.40 K  49.02 μs ±20.06%  46.53 μs  86.84 μs
# new-alt    15.64 K  63.95 μs ±14.60%  61.53 μs 104.48 μs

# add Maps output for use with the new HPath sigil
# raw        137.72 K
# new         43.00 K - 3.20x slower +15.99 μs
# new-copy    38.38 K - 3.59x slower +18.80 μs
# maps        32.75 K - 4.21x slower +23.27 μs
# round-trip  22.61 K - 6.09x slower +36.96 μs
# new-alt     16.78 K - 8.21x slower +52.32 μs
HL7.Message.new() 35 | |> HL7.Message.to_list() 36 | |> Enum.at(2) 37 | 38 | new_pid = HL7.Segment.replace_part(pid, "sub here", 3, 0, 1) 39 | v = new_pid |> HL7.Segment.get_part(3, 0, 1) 40 | assert v == "sub here" 41 | end 42 | 43 | test "Can inject a segment subcomponent" do 44 | pid = 45 | HL7.Examples.wikipedia_sample_hl7() 46 | |> HL7.Message.new() 47 | |> HL7.Message.to_list() 48 | |> Enum.at(2) 49 | 50 | new_pid = HL7.Segment.replace_part(pid, "sub here", 3, 0, 1, 2) 51 | v = new_pid |> HL7.Segment.get_part(3, 0, 1, 2) 52 | assert v == "sub here" 53 | end 54 | 55 | test "Can replace a segment field with a list" do 56 | pid = 57 | HL7.Examples.wikipedia_sample_hl7() 58 | |> HL7.Message.new() 59 | |> HL7.Message.to_list() 60 | |> Enum.at(2) 61 | 62 | new_pid = HL7.Segment.replace_part(pid, [["sleep", "sleep", ["and", "more_sleep"]]], 1) 63 | v = new_pid |> HL7.Segment.get_part(1, 1, 3, 2) 64 | assert v == "more_sleep" 65 | end 66 | 67 | test "Can replace a segment field with a function" do 68 | pid = 69 | HL7.Examples.wikipedia_sample_hl7() 70 | |> HL7.Message.new() 71 | |> HL7.Message.to_list() 72 | |> Enum.at(2) 73 | 74 | new_pid = HL7.Segment.replace_part(pid, fn d -> d <> "X" end, 8) 75 | v = new_pid |> HL7.Segment.get_part(8) 76 | assert v == "MX" 77 | end 78 | 79 | test "Can replace a segment repetition" do 80 | pid = 81 | HL7.Examples.wikipedia_sample_hl7() 82 | |> HL7.Message.new() 83 | |> HL7.Message.to_list() 84 | |> Enum.at(2) 85 | 86 | new_pid = HL7.Segment.replace_part(pid, "redacted", 11, 1) 87 | v = new_pid |> HL7.Segment.get_part(11, 1) 88 | assert v == "redacted" 89 | end 90 | 91 | test "Can inject a segment repetition" do 92 | pid = 93 | HL7.Examples.wikipedia_sample_hl7() 94 | |> HL7.Message.new() 95 | |> HL7.Message.to_list() 96 | |> Enum.at(2) 97 | 98 | new_pid = HL7.Segment.replace_part(pid, "redacted", 11, 3) 99 | v = new_pid |> HL7.Segment.get_part(11, 3) 100 | assert v == "redacted" 101 | end 102 | end 103 | 
-------------------------------------------------------------------------------- /lib/hl7/path.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.Path do 2 | @moduledoc """ 3 | A module and struct representing an HL7 path. This is typically created through the `HL7.sigil_p/2` macro 4 | in order to gain compile-time correctness guarantees. 5 | """ 6 | 7 | defstruct segment: nil, 8 | segment_number: nil, 9 | field: nil, 10 | repetition: nil, 11 | component: nil, 12 | subcomponent: nil, 13 | truncate: false, 14 | data: nil, 15 | path: nil 16 | 17 | @type t() :: %__MODULE__{} 18 | 19 | @doc ~S""" 20 | Creates an HL7 path struct at runtime. 21 | 22 | ## Examples 23 | 24 | iex> HL7.Examples.wikipedia_sample_hl7() 25 | ...> |> HL7.new!() 26 | ...> |> HL7.get(HL7.Path.new("OBX-5")) 27 | "1.80" 28 | """ 29 | 30 | def new(path) do 31 | import HL7.PathParser 32 | {:ok, data, _, _, _, _} = parse(path) 33 | 34 | path_map = 35 | %__MODULE__{} 36 | |> Map.merge(Map.new(data, fn {k, v} -> {k, List.first(v)} end)) 37 | |> apply_default_repetition() 38 | |> apply_default_segment_number() 39 | 40 | %__MODULE__{ 41 | path_map 42 | | path: path, 43 | data: get_data(path, path_map) 44 | } 45 | end 46 | 47 | defp apply_default_segment_number(%__MODULE__{segment: nil} = path_map) do 48 | path_map 49 | end 50 | 51 | defp apply_default_segment_number(%__MODULE__{segment_number: nil} = path_map) do 52 | Map.put(path_map, :segment_number, 1) 53 | end 54 | 55 | defp apply_default_segment_number(%__MODULE__{} = path_map) do 56 | path_map 57 | end 58 | 59 | defp apply_default_repetition(%__MODULE__{field: nil, repetition: r} = path_map) do 60 | if is_nil(r) do 61 | path_map 62 | else 63 | raise ArgumentError, 64 | "HL7.Path cannot contain a repetition without a field or segment number with a segment" 65 | end 66 | end 67 | 68 | defp apply_default_repetition(%__MODULE__{repetition: nil} = path_map) do 69 | Map.put(path_map, :repetition, 1) 70 | 
end 71 | 72 | defp apply_default_repetition(%__MODULE__{} = path_map) do 73 | path_map 74 | end 75 | 76 | # temporary backwards compatibility data for `HL7.Query` paths, to be deprecated in the future 77 | defp get_data(path, %__MODULE__{} = path_map) do 78 | repetition = 79 | cond do 80 | String.contains?(path, "[") and is_integer(path_map.repetition) -> path_map.repetition - 1 81 | path_map.component || path_map.subcomponent -> 0 82 | true -> nil 83 | end 84 | 85 | m = %__MODULE__{path_map | repetition: repetition} 86 | 87 | indices = 88 | cond do 89 | m.subcomponent -> [m.field, m.repetition, m.component - 1, m.subcomponent - 1] 90 | m.component -> [m.field, m.repetition, m.component - 1] 91 | m.repetition -> [m.field, m.repetition] 92 | m.field -> [m.field] 93 | true -> [] 94 | end 95 | 96 | if m.segment, do: {m.segment, indices}, else: indices 97 | end 98 | end 99 | 100 | defimpl Inspect, for: HL7.Path do 101 | def inspect(%HL7.Path{path: path}, _opts) do 102 | "~p\"" <> path <> "\"" 103 | end 104 | end 105 | -------------------------------------------------------------------------------- /lib/hl7/header.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.Header do 2 | @moduledoc """ 3 | An HL7 header implementation that can be used to build MSH segments and HL7 messages. 4 | It is also exposed as metadata after parsing any HL7 message. 
5 | """ 6 | 7 | @type t :: %HL7.Header{ 8 | message_type: binary(), 9 | trigger_event: binary(), 10 | sending_facility: binary(), 11 | sending_application: binary(), 12 | message_date_time: String.t(), 13 | security: String.t(), 14 | message_control_id: String.t(), 15 | processing_id: String.t(), 16 | separators: HL7.Separators.t(), 17 | hl7_version: binary() 18 | } 19 | 20 | defstruct message_type: "", 21 | trigger_event: "", 22 | sending_facility: "", 23 | sending_application: "", 24 | receiving_facility: "", 25 | receiving_application: "", 26 | message_date_time: "", 27 | security: "", 28 | message_control_id: "", 29 | processing_id: "", 30 | separators: %HL7.Separators{}, 31 | hl7_version: "" 32 | 33 | @spec new(String.t(), String.t(), String.t(), String.t() | list(), String.t()) :: HL7.Header.t() 34 | def new(message_type, trigger_event, message_control_id, processing_id \\ "P", version \\ "2.1") do 35 | %HL7.Header{ 36 | hl7_version: version, 37 | message_type: message_type, 38 | trigger_event: trigger_event, 39 | message_control_id: message_control_id, 40 | processing_id: processing_id, 41 | message_date_time: get_message_date_time() 42 | } 43 | end 44 | 45 | @spec to_msh(HL7.Header.t()) :: [any(), ...] 
46 | def to_msh(%HL7.Header{} = h) do 47 | [ 48 | "MSH", 49 | h.separators.field, 50 | h.separators.encoding_characters, 51 | h.sending_application, 52 | h.sending_facility, 53 | h.receiving_application, 54 | h.receiving_facility, 55 | h.message_date_time, 56 | h.security, 57 | get_message_type_field(h), 58 | h.message_control_id, 59 | h.processing_id, 60 | h.hl7_version 61 | ] 62 | end 63 | 64 | @spec zero_pad(pos_integer(), pos_integer()) :: String.t() 65 | defp zero_pad(num, digits_needed) when is_integer(num) and is_integer(digits_needed) do 66 | string_num = Integer.to_string(num) 67 | pad_size = digits_needed - String.length(string_num) 68 | zeros = String.duplicate("0", pad_size) 69 | zeros <> string_num 70 | end 71 | 72 | @spec get_message_date_time() :: String.t() 73 | def get_message_date_time() do 74 | now = DateTime.utc_now() 75 | 76 | zero_pad(now.year, 4) <> 77 | zero_pad(now.month, 2) <> 78 | zero_pad(now.day, 2) <> 79 | zero_pad(now.hour, 2) <> zero_pad(now.minute, 2) <> zero_pad(now.second, 2) <> "+0000" 80 | end 81 | 82 | @spec get_message_type_field(HL7.Header.t()) :: [any()] 83 | def get_message_type_field(%HL7.Header{} = h) do 84 | v = 85 | case h.hl7_version do 86 | "2.1" -> [h.message_type, h.trigger_event] 87 | "2.2" -> [h.message_type, h.trigger_event] 88 | "2.3" -> [h.message_type, h.trigger_event] 89 | "2.3.1" -> [h.message_type, h.trigger_event] 90 | _ -> [h.message_type, h.trigger_event, h.message_type <> "_" <> h.trigger_event] 91 | end 92 | 93 | [v] 94 | end 95 | end 96 | -------------------------------------------------------------------------------- /lib/hl7/lexers/dynamic.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.Lexers.Dynamic do 2 | @moduledoc false 3 | @carriage_return 4 4 | @field 3 5 | @repetition 2 6 | @component 1 7 | @sub_component 0 8 | 9 | def tokenize(text) do 10 | separators = HL7.Separators.new(text) 11 | tokenize(text, separators) 12 | end 13 | 14 | def 
tokenize( 15 | <<"MSH", _field_and_encoding::binary-size(5), rest::binary>> = text, 16 | %HL7.Separators{truncation_char: ""} = separators 17 | ) do 18 | %{encoding_characters: encoding_characters, field: field} = separators 19 | tokenize(rest, text, 8, 0, [encoding_characters, @field, field, @field, "MSH"], separators) 20 | end 21 | 22 | def tokenize( 23 | <<"MSH", _field_and_encoding::binary-size(6), rest::binary>> = text, 24 | %HL7.Separators{} = separators 25 | ) do 26 | %{encoding_characters: encoding_characters, field: field} = separators 27 | tokenize(rest, text, 9, 0, [encoding_characters, @field, field, @field, "MSH"], separators) 28 | end 29 | 30 | defp tokenize( 31 | <>, 32 | original, 33 | skip, 34 | len, 35 | acc, 36 | %HL7.Separators{field: field} = separators 37 | ) do 38 | tokenize_terminator(rest, original, skip, len, acc, @field, separators) 39 | end 40 | 41 | defp tokenize( 42 | <>, 43 | original, 44 | skip, 45 | len, 46 | acc, 47 | %HL7.Separators{field_repeat: repetition} = separators 48 | ) do 49 | tokenize_terminator(rest, original, skip, len, acc, @repetition, separators) 50 | end 51 | 52 | defp tokenize( 53 | <>, 54 | original, 55 | skip, 56 | len, 57 | acc, 58 | %HL7.Separators{component: component} = separators 59 | ) do 60 | tokenize_terminator(rest, original, skip, len, acc, @component, separators) 61 | end 62 | 63 | defp tokenize( 64 | <>, 65 | original, 66 | skip, 67 | len, 68 | acc, 69 | %HL7.Separators{subcomponent: subcomponent} = separators 70 | ) do 71 | tokenize_terminator(rest, original, skip, len, acc, @sub_component, separators) 72 | end 73 | 74 | defp tokenize(<<"\r", rest::binary>>, original, skip, len, acc, separators) do 75 | tokenize_terminator(rest, original, skip, len, acc, @carriage_return, separators) 76 | end 77 | 78 | defp tokenize(<<"\n", rest::binary>>, original, skip, len, acc, separators) do 79 | tokenize_terminator(rest, original, skip, len, acc, @carriage_return, separators) 80 | end 81 | 82 | defp 
tokenize(<<_char::binary-size(1), rest::binary>>, original, skip, len, acc, separators) do 83 | tokenize(rest, original, skip, len + 1, acc, separators) 84 | end 85 | 86 | defp tokenize("", _original, _skip, 0, acc, _separators) do 87 | Enum.reverse(acc) 88 | end 89 | 90 | defp tokenize("", original, skip, len, acc, _separators) do 91 | string = binary_part(original, skip, len) 92 | Enum.reverse([string | acc]) 93 | end 94 | 95 | defp tokenize_terminator(text, original, skip, 0, acc, terminator, separators) do 96 | tokenize(text, original, skip + 1, 0, [terminator | acc], separators) 97 | end 98 | 99 | defp tokenize_terminator(text, original, skip, len, acc, terminator, separators) do 100 | string = binary_part(original, skip, len) 101 | tokenize(text, original, skip + len + 1, 0, [terminator, string | acc], separators) 102 | end 103 | 104 | @compile {:inline, tokenize: 6, tokenize_terminator: 7} 105 | end 106 | -------------------------------------------------------------------------------- /lib/hl7/lexers/dynamic_with_copy.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.Lexers.DynamicWithCopy do 2 | @moduledoc false 3 | @carriage_return 4 4 | @field 3 5 | @repetition 2 6 | @component 1 7 | @sub_component 0 8 | 9 | def tokenize(text) do 10 | separators = HL7.Separators.new(text) 11 | tokenize(text, separators) 12 | end 13 | 14 | def tokenize( 15 | <<"MSH", _field_and_encoding::binary-size(5), rest::binary>> = text, 16 | %HL7.Separators{truncation_char: ""} = separators 17 | ) do 18 | %{encoding_characters: encoding_characters, field: field} = separators 19 | tokenize(rest, text, 8, 0, [encoding_characters, @field, field, @field, "MSH"], separators) 20 | end 21 | 22 | def tokenize( 23 | <<"MSH", _field_and_encoding::binary-size(6), rest::binary>> = text, 24 | %HL7.Separators{} = separators 25 | ) do 26 | %{encoding_characters: encoding_characters, field: field} = separators 27 | tokenize(rest, text, 9, 0, 
[encoding_characters, @field, field, @field, "MSH"], separators) 28 | end 29 | 30 | defp tokenize( 31 | <>, 32 | original, 33 | skip, 34 | len, 35 | acc, 36 | %HL7.Separators{field: field} = separators 37 | ) do 38 | tokenize_terminator(rest, original, skip, len, acc, @field, separators) 39 | end 40 | 41 | defp tokenize( 42 | <>, 43 | original, 44 | skip, 45 | len, 46 | acc, 47 | %HL7.Separators{field_repeat: repetition} = separators 48 | ) do 49 | tokenize_terminator(rest, original, skip, len, acc, @repetition, separators) 50 | end 51 | 52 | defp tokenize( 53 | <>, 54 | original, 55 | skip, 56 | len, 57 | acc, 58 | %HL7.Separators{component: component} = separators 59 | ) do 60 | tokenize_terminator(rest, original, skip, len, acc, @component, separators) 61 | end 62 | 63 | defp tokenize( 64 | <>, 65 | original, 66 | skip, 67 | len, 68 | acc, 69 | %HL7.Separators{subcomponent: subcomponent} = separators 70 | ) do 71 | tokenize_terminator(rest, original, skip, len, acc, @sub_component, separators) 72 | end 73 | 74 | defp tokenize(<<"\r", rest::binary>>, original, skip, len, acc, separators) do 75 | tokenize_terminator(rest, original, skip, len, acc, @carriage_return, separators) 76 | end 77 | 78 | defp tokenize(<<"\n", rest::binary>>, original, skip, len, acc, separators) do 79 | tokenize_terminator(rest, original, skip, len, acc, @carriage_return, separators) 80 | end 81 | 82 | defp tokenize(<<_char::binary-size(1), rest::binary>>, original, skip, len, acc, separators) do 83 | tokenize(rest, original, skip, len + 1, acc, separators) 84 | end 85 | 86 | defp tokenize("", _original, _skip, 0, acc, _separators) do 87 | Enum.reverse(acc) 88 | end 89 | 90 | defp tokenize("", original, skip, len, acc, _separators) do 91 | string = binary_part(original, skip, len) |> :binary.copy() 92 | Enum.reverse([string | acc]) 93 | end 94 | 95 | defp tokenize_terminator(text, original, skip, 0, acc, terminator, separators) do 96 | tokenize(text, original, skip + 1, 0, [terminator | 
acc], separators) 97 | end 98 | 99 | defp tokenize_terminator(text, original, skip, len, acc, terminator, separators) do 100 | string = binary_part(original, skip, len) |> :binary.copy() 101 | tokenize(text, original, skip + len + 1, 0, [terminator, string | acc], separators) 102 | end 103 | 104 | @compile {:inline, tokenize: 6, tokenize_terminator: 7} 105 | end 106 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | ## 0.12.1 2 | - Fixes edge cases to allow lists to be passed in and to handle empty values consistently. 3 | 4 | ## 0.12.0 5 | - Breaking: Updating a field, like `PID-11[*]`, now passes a list of reps instead of a map to be more consistent 6 | 7 | ## 0.11.0 8 | - Breaking: Reverting ! returning string over nil 9 | - Future plans: put this in options for explicit control 10 | 11 | ## 0.10.2 12 | - Fix issues with manipulating repetitions when the target is a plain string 13 | - Changes to soft deprecations to avoid build issues for those avoiding warnings 14 | 15 | ## 0.10.1 16 | - Revert to_list strictness and removal of HL7.parsed_hl7_segments() type 17 | 18 | ## 0.10.0 19 | - Revamped and extended the documentation and tests for the new HL7 API 20 | - Added migration support to easily go between old and new APIs 21 | - Metadata `tags` added to HL7 struct 22 | 23 | ## 0.9.4 24 | - Updated the Put function to handle updating full segments 25 | 26 | ## 0.9.3 27 | - Deprecating HL7.Query and HL7.Message and associated modules 28 | - Docs updates for HL7 29 | 30 | ## 0.9.2 31 | - Bug fix for internal format (caused issues between HL7 and HL7.Message) 32 | 33 | ## 0.9.1 34 | 35 | - Accidentally included nimble_parsec as a hard dependency, removing it. 
36 | - Added `keep_prefix_segments` option to `chunk_by_leading_segment/2` 37 | 38 | ## 0.9.0 39 | 40 | - Planning to move all `HL7.Query` functionality to `HL7` module with a map-based API. 41 | - `HL7` module contains new map-based functions (experimentally). 42 | 43 | ## 0.8.0 44 | 45 | - Deprecates `HL7.Query` functions that take `String` in favor of functions that take `HL7.Path` 46 | - `get_parts/2` -> `find_all/2` 47 | - `get_part/2` -> `find_first/2` 48 | - `replace_parts/3` -> `update/3` 49 | - Hard coded grammars may use `~p` for compile time checks of grammars 50 | - Grammars coming from the outside can be changed into strings using `HL7.Path.new/1` 51 | - Deprecates `HL7.FieldGrammar.to_indices/1` use `HL7.Path.new/1` instead 52 | 53 | ## 0.7.3 54 | 55 | - Fixes typespecs on HL7.Query.do_get_part 56 | 57 | ## 0.7.2 58 | 59 | - Fixes warnings for deprecations reporting bad locations 60 | 61 | ## 0.7.1 62 | 63 | - Fixes error when identifiers are used with String values in HL7.Query functions. 64 | 65 | ## 0.7.0 66 | 67 | - Precompiles hard coded HL7.Query strings 68 | - Deprecates passing Strings as arguments to HL7.Query functions 69 | - Adds sigil to create paths `~g{PID-3.1}` 70 | 71 | ## 0.6.6 72 | 73 | - Added optional options to `HL7.Message.new` to support string validation, binary copies, and latin1 encodings. 74 | 75 | ## 0.6.5 76 | 77 | - Using binary copy while parsing to minimize shared binary references 78 | 79 | ## 0.6.4 80 | 81 | - Performance increase with new tokenizer and hot path for default message delimiters -- up to 6x faster parsing now! 82 | 83 | ## 0.6.1 84 | 85 | Added wildcard selections, such as `HL7.Query.select(message, "OBR*")` to select segment types with all trailing 86 | segments until the lead segment is encountered again. Also includes `!NTE` to match any segment but an NTE segment. 
87 | 88 | ## 0.6.0 89 | 90 | Changes -- (looser parsing rules, next version will add warnings field) 91 | 92 | - HL7.Message will successfully parse messages without an HL7 Version (== nil) 93 | - HL7.Message now contains a fragments field for invalid segment data (empty list normally) 94 | 95 | ## 0.5.0 96 | 97 | Bug Fixes 98 | 99 | - Hl7.Query.get_part was not returning multiple repetitions 100 | - Roadmap: will add an "invalid query" struct similar to invalid message 101 | 102 | 103 | ## 0.4.0 104 | 105 | Bug Fixes (that could change expectations, thus version bump) 106 | 107 | - Messages with certain invalid headers will produce InvalidMessage and InvalidHeader structs instead of crashing. 108 | - Messages that do not specify encoding characters will use the HL7 default. 109 | - Messages that do not include Trigger Events will now act as "correct" HL7 110 | - Roadmap: will include a warnings list for parsed messages such that systems can decide on an appropriate strictness level. -------------------------------------------------------------------------------- /test/hl7/grammar_test.exs: -------------------------------------------------------------------------------- 1 | defmodule HL7.GrammarTest do 2 | use ExUnit.Case 3 | require Logger 4 | doctest HL7.SegmentGrammar 5 | import HL7.SegmentGrammar 6 | 7 | test "single segment grammar" do 8 | g = new("OBX") 9 | assert g.children == ["OBX"] 10 | assert g.optional == false 11 | assert g.repeating == false 12 | end 13 | 14 | test "multi segment grammar" do 15 | g = new("OBX AL1") 16 | assert g.children == ["OBX", "AL1"] 17 | assert g.optional == false 18 | assert g.repeating == false 19 | end 20 | 21 | test "fully optional grammar is invalid" do 22 | g = new("[OBX] [AL1 {[PV1 OBR]} NTE]") 23 | assert %HL7.InvalidGrammar{} = g 24 | assert :no_required_segments = g.reason 25 | end 26 | 27 | test "bad tokens are invalid" do 28 | g = new("{OBX} - {AL1 [{PV1 OBR}] NTE}") 29 | assert %HL7.InvalidGrammar{} = g 30 | assert 
:invalid_token = g.reason 31 | end 32 | 33 | test "optional child grammar" do 34 | g = new("OBX [AL1]") 35 | 36 | assert %HL7.SegmentGrammar{} = g 37 | assert ["OBX" | _] = g.children 38 | 39 | ["OBX", inner_child | _] = g.children 40 | 41 | assert %HL7.SegmentGrammar{} = inner_child 42 | assert ["AL1"] = inner_child.children 43 | assert true == inner_child.optional 44 | 45 | assert %HL7.SegmentGrammar{ 46 | children: [ 47 | "OBX", 48 | %HL7.SegmentGrammar{ 49 | children: ["AL1"], 50 | optional: true, 51 | repeating: false 52 | } 53 | ], 54 | optional: false, 55 | repeating: false 56 | } == g 57 | end 58 | 59 | test "repeating child grammar" do 60 | g = new("OBX {AL1}") 61 | 62 | assert %HL7.SegmentGrammar{} = g 63 | assert ["OBX" | _] = g.children 64 | 65 | ["OBX", inner_child | _] = g.children 66 | 67 | assert %HL7.SegmentGrammar{} = inner_child 68 | assert ["AL1"] = inner_child.children 69 | assert true == inner_child.repeating 70 | 71 | assert %HL7.SegmentGrammar{ 72 | children: [ 73 | "OBX", 74 | %HL7.SegmentGrammar{ 75 | children: ["AL1"], 76 | optional: false, 77 | repeating: true 78 | } 79 | ], 80 | optional: false, 81 | repeating: false 82 | } == g 83 | end 84 | 85 | test "repeating and optional child grammar" do 86 | g = new("OBX {[AL1]}") 87 | 88 | assert %HL7.SegmentGrammar{} = g 89 | assert ["OBX" | _] = g.children 90 | 91 | ["OBX", inner_child | _] = g.children 92 | 93 | assert %HL7.SegmentGrammar{} = inner_child 94 | assert [%HL7.SegmentGrammar{}] = inner_child.children 95 | assert true == inner_child.repeating 96 | 97 | assert %HL7.SegmentGrammar{ 98 | children: [ 99 | "OBX", 100 | %HL7.SegmentGrammar{ 101 | children: [ 102 | %HL7.SegmentGrammar{ 103 | children: ["AL1"], 104 | optional: true, 105 | repeating: false 106 | } 107 | ], 108 | optional: false, 109 | repeating: true 110 | } 111 | ], 112 | optional: false, 113 | repeating: false 114 | } == g 115 | end 116 | 117 | test "nested grammar" do 118 | g = new("OBX {NTE {[AL1]}}") 119 | 120 | assert 
%HL7.SegmentGrammar{ 121 | children: [ 122 | "OBX", 123 | %HL7.SegmentGrammar{ 124 | children: [ 125 | "NTE", 126 | %HL7.SegmentGrammar{ 127 | children: [ 128 | %HL7.SegmentGrammar{children: ["AL1"], optional: true, repeating: false} 129 | ], 130 | optional: false, 131 | repeating: true 132 | } 133 | ], 134 | optional: false, 135 | repeating: true 136 | } 137 | ], 138 | optional: false, 139 | repeating: false 140 | } == g 141 | end 142 | end 143 | -------------------------------------------------------------------------------- /lib/hl7/segment_grammar.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.SegmentGrammar do 2 | require Logger 3 | 4 | @type t :: %HL7.SegmentGrammar{ 5 | children: list(String.t() | t()), 6 | optional: boolean(), 7 | repeating: boolean() 8 | } 9 | 10 | @type grammar_result :: HL7.SegmentGrammar.t() | HL7.InvalidGrammar.t() 11 | 12 | @moduledoc false 13 | 14 | defstruct children: [], 15 | optional: false, 16 | repeating: false 17 | 18 | # Selector strings should be HL7 segment grammar strings, e.g., "OBR {EVN} [{OBX [{NTE}]}]" 19 | 20 | @spec new(String.t()) :: grammar_result() 21 | def new(schema) do 22 | chunks = chunk_schema(schema) 23 | {g, _tail} = build_grammar(%HL7.SegmentGrammar{}, schema, chunks) 24 | 25 | with %HL7.SegmentGrammar{} <- g do 26 | case has_required_children?(g) do 27 | true -> g 28 | false -> HL7.InvalidGrammar.no_required_segments() 29 | end 30 | end 31 | end 32 | 33 | @spec has_required_children?(HL7.SegmentGrammar.t()) :: boolean() 34 | def has_required_children?(%HL7.SegmentGrammar{} = g) do 35 | check_for_non_optional_children(g) 36 | end 37 | 38 | @spec check_for_non_optional_children(HL7.SegmentGrammar.t()) :: boolean() 39 | defp check_for_non_optional_children(%HL7.SegmentGrammar{optional: true}) do 40 | false 41 | end 42 | 43 | defp check_for_non_optional_children(%HL7.SegmentGrammar{children: children, optional: false}) do 44 | Enum.find( 45 | children, 46 
| fn g -> 47 | case g do 48 | <<_::binary-size(3)>> -> 49 | true 50 | 51 | _ -> 52 | check_for_non_optional_children(g) 53 | end 54 | end 55 | ) 56 | |> case do 57 | nil -> false 58 | false -> false 59 | _ -> true 60 | end 61 | end 62 | 63 | defp add_child(%HL7.SegmentGrammar{} = grammar, child) do 64 | %{grammar | children: [child | grammar.children]} 65 | end 66 | 67 | defp close_bracket(%HL7.SegmentGrammar{} = grammar) do 68 | %{grammar | children: Enum.reverse(grammar.children)} 69 | end 70 | 71 | @spec build_grammar(HL7.SegmentGrammar.t(), String.t(), [String.t()]) :: 72 | {HL7.SegmentGrammar.t() | HL7.InvalidGrammar.invalid_token(), [String.t()]} 73 | defp build_grammar(%HL7.SegmentGrammar{} = grammar, schema, [chunk | tail] = tokens) do 74 | case chunk do 75 | "{" -> 76 | {g, chunks} = build_grammar(%HL7.SegmentGrammar{repeating: true}, schema, tail) 77 | 78 | case g do 79 | %HL7.InvalidGrammar{} -> 80 | {g, tokens} 81 | 82 | %HL7.SegmentGrammar{} -> 83 | grammar 84 | |> add_child(g) 85 | |> build_grammar(schema, chunks) 86 | end 87 | 88 | "[" -> 89 | {g, chunks} = build_grammar(%HL7.SegmentGrammar{optional: true}, schema, tail) 90 | 91 | case g do 92 | %HL7.InvalidGrammar{} -> 93 | {g, tokens} 94 | 95 | %HL7.SegmentGrammar{} -> 96 | grammar 97 | |> add_child(g) 98 | |> build_grammar(schema, chunks) 99 | end 100 | 101 | " " -> 102 | build_grammar(grammar, schema, tail) 103 | 104 | closing_bracket when closing_bracket in ~w(} ]) -> 105 | {close_bracket(grammar), tail} 106 | 107 | <<"!", tag::binary-size(3)>> -> 108 | not_tag = "!" <> tag 109 | 110 | wildcard = %HL7.SegmentGrammar{ 111 | repeating: true, 112 | children: [ 113 | %HL7.SegmentGrammar{optional: true, children: [not_tag]} 114 | ] 115 | } 116 | 117 | g = %{grammar | children: [wildcard | grammar.children]} 118 | build_grammar(g, schema, tail) 119 | 120 | <> -> 121 | not_tag = "!" 
<> tag 122 | 123 | wildcard = %HL7.SegmentGrammar{ 124 | repeating: true, 125 | children: [ 126 | %HL7.SegmentGrammar{optional: true, children: [not_tag]} 127 | ] 128 | } 129 | 130 | grammar 131 | |> add_child(tag) 132 | |> add_child(wildcard) 133 | |> build_grammar(schema, tail) 134 | 135 | <> -> 136 | grammar 137 | |> add_child(tag) 138 | |> build_grammar(schema, tail) 139 | 140 | _invalid_token -> 141 | {HL7.InvalidGrammar.invalid_token(chunk, schema), []} 142 | end 143 | end 144 | 145 | defp build_grammar(%HL7.SegmentGrammar{} = grammar, _schema, []) do 146 | {%HL7.SegmentGrammar{grammar | children: Enum.reverse(grammar.children)}, []} 147 | end 148 | 149 | @spec chunk_schema(String.t()) :: [String.t()] 150 | def chunk_schema(schema) do 151 | Regex.split(~r{(\{|\[|\}|\]|\s)}, schema, include_captures: true, trim: true) 152 | end 153 | end 154 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: ci 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - master 8 | pull_request: 9 | types: [opened, synchronize] 10 | 11 | env: 12 | MIX_ENV: test 13 | otp-version: '26' 14 | elixir-version: '1.18' 15 | cache-version: '1' 16 | 17 | jobs: 18 | build: 19 | strategy: 20 | matrix: 21 | otp: ['25', '26', '27'] 22 | elixir: ['1.18'] 23 | name: Build 24 | runs-on: ubuntu-latest 25 | steps: 26 | - name: checkout 27 | uses: actions/checkout@v4 28 | - name: setup 29 | uses: erlef/setup-beam@v1 30 | with: 31 | otp-version: ${{ matrix.otp }} 32 | elixir-version: ${{ matrix.elixir }} 33 | - name: restore-deps 34 | uses: actions/cache@v4 35 | with: 36 | path: deps 37 | key: ${{ runner.os }}-mix-${{ matrix.otp }}-${{ matrix.elixir }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 38 | - name: restore-build 39 | uses: actions/cache@v4 40 | with: 41 | path: _build 42 | key: ${{ runner.os 
}}-build-${{ matrix.otp }}-${{ matrix.elixir }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 43 | - name: deps.get 44 | run: mix deps.get 45 | - name: compile 46 | run: | 47 | mix compile --warnings-as-errors 48 | 49 | format: 50 | name: Format 51 | needs: build 52 | runs-on: ubuntu-latest 53 | steps: 54 | - name: checkout 55 | uses: actions/checkout@v4 56 | - name: setup 57 | uses: erlef/setup-beam@v1 58 | with: 59 | otp-version: ${{ env.otp-version }} 60 | elixir-version: ${{ env.elixir-version }} 61 | - name: restore-deps 62 | uses: actions/cache@v4 63 | with: 64 | path: deps 65 | key: ${{ runner.os }}-mix-${{ env.otp-version }}-${{ env.elixir-version }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 66 | - name: restore-build 67 | uses: actions/cache@v4 68 | with: 69 | path: _build 70 | key: ${{ runner.os }}-build-${{ env.otp-version }}-${{ env.elixir-version }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 71 | - name: format 72 | run: mix format --check-formatted 73 | 74 | dialyzer: 75 | needs: build 76 | strategy: 77 | matrix: 78 | otp: ['25', '26', '27'] 79 | elixir: ['1.18'] 80 | name: Dialyzer 81 | runs-on: ubuntu-latest 82 | steps: 83 | - name: checkout 84 | uses: actions/checkout@v4 85 | - name: setup 86 | uses: erlef/setup-beam@v1 87 | with: 88 | otp-version: ${{ matrix.otp }} 89 | elixir-version: ${{ matrix.elixir }} 90 | - name: restore-deps 91 | uses: actions/cache@v4 92 | with: 93 | path: deps 94 | key: ${{ runner.os }}-mix-${{ matrix.otp }}-${{ matrix.elixir }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 95 | - name: restore-build 96 | uses: actions/cache@v4 97 | with: 98 | path: _build 99 | key: ${{ runner.os }}-build-${{ matrix.otp }}-${{ matrix.elixir }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 100 | - name:
restore-plts 101 | uses: actions/cache@v4 102 | with: 103 | path: priv/plts 104 | key: ${{ runner.os }}-hl7-dialyzer-${{ matrix.otp }}-${{ matrix.elixir }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }}-${{ hashFiles(format('{0}{1}', github.workspace, '/priv/plts/hl7.plt')) }} 105 | - name: dialyze 106 | run: mix dialyzer --force-check 107 | 108 | test: 109 | needs: build 110 | strategy: 111 | matrix: 112 | otp: ['25', '26', '27'] 113 | elixir: ['1.18'] 114 | name: Test 115 | runs-on: ubuntu-latest 116 | steps: 117 | - name: checkout 118 | uses: actions/checkout@v4 119 | - name: setup 120 | uses: erlef/setup-beam@v1 121 | with: 122 | otp-version: ${{ matrix.otp }} 123 | elixir-version: ${{ matrix.elixir }} 124 | - name: restore-deps 125 | uses: actions/cache@v4 126 | with: 127 | path: deps 128 | key: ${{ runner.os }}-mix-${{ matrix.otp }}-${{ matrix.elixir }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 129 | - name: restore-build 130 | uses: actions/cache@v4 131 | with: 132 | path: _build 133 | key: ${{ runner.os }}-build-${{ matrix.otp }}-${{ matrix.elixir }}-${{ env.cache-version }}-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} 134 | - name: test 135 | run: mix test 136 | -------------------------------------------------------------------------------- /main.md: -------------------------------------------------------------------------------- 1 | # Elixir HL7 2 | 3 | [![hex.pm version](https://img.shields.io/hexpm/v/elixir_hl7.svg)](https://hex.pm/packages/elixir_hl7) 4 | [![hex.pm downloads](https://img.shields.io/hexpm/dt/elixir_hl7.svg)](https://hex.pm/packages/elixir_hl7) 5 | [![hex.pm license](https://img.shields.io/hexpm/l/elixir_hl7.svg)](https://hex.pm/packages/elixir_hl7) 6 | 7 | An Elixir library for working with HL7 v2.x healthcare data. 
8 | 9 | Elixir HL7 provides functions to parse, query and modify healthcare data that conforms to the HL7 v2.x standards. 10 | It should be able to reconstruct any HL7 message without data loss or corruption. 11 | 12 | It also provides basic support for reading HL7 file streams with configurable delimiters (MLLP included). 13 | 14 | This library has been tested on a wide variety of real-world HL7 messages to ensure correctness and flexibility. 15 | 16 | Since many HL7 messages do not strictly conform to the standards specified for each version, the library does not 17 | attempt to enforce limits such as character counts or structural expectations. 18 | 19 | In fact, HL7 uses implicit hierarchies within segments (by leaving out certain separators) and to group segments 20 | (via expected patterns only known to the consuming application). 21 | 22 | The majority of the API currently resides in the eponymous `HL7` module. 23 | 24 | You can learn more about HL7 here: 25 | * Wikipedia's [HL7 article](https://en.wikipedia.org/wiki/Health_Level_7) 26 | * The official HL7 website ([hl7.org](http://www.hl7.org/index.cfm)) 27 | 28 | Please [report an issue](https://github.com/HCA-Healthcare/elixir-hl7/issues) if something appears to be handled incorrectly. 29 | 30 | > ### New HL7 API {: .warning} 31 | > 32 | > The new `HL7` module API replaces the `HL7.Query`, `HL7.Segment`, and `HL7.Message` modules. 33 | > 34 | > These will likely not be removed for some time, and their 35 | > removal will coincide with a major version release. 36 | > 37 | > For now, the two systems can exchange data when needed. 38 | > See [here](./HL7.html##module-migrating-from-hl7-message-hl7-segment-and-hl7-query) for details! 39 | 40 | ## History 41 | 42 | We originally downloaded the HL7 specifications and generated structs to represent all possible message variants. 43 | Unfortunately, thousands of vendors and hospitals do NOT actually follow these specifications. 
44 | It turned out to be a fool's errand. 45 | 46 | We then created a library designed to loosely parse and manipulate HL7 documents. This worked quite well, but as 47 | it took inspiration from jQuery and D3js, it did not mesh well with canonical Elixir. 48 | 49 | This is the third approach to this library. We've attempted to hew closely to the HL7 business domain terminology 50 | while also simplifying the API such that its data structures are more compatible with core Elixir modules like Map and Enum. 51 | 52 | ## Installation 53 | 54 | Add this library to your mix.exs file. 55 | 56 | ```elixir 57 | defp deps do 58 | [{:elixir_hl7, "~> 0.12.1"}] 59 | end 60 | ``` 61 | 62 | ## Examples 63 | 64 | The `HL7.Examples` module contains functions with sample data that you can use to explore the API. 65 | 66 | iex> import HL7, only: :sigils 67 | iex> HL7.Examples.wikipedia_sample_hl7() 68 | ...> |> HL7.new!() 69 | ...> |> HL7.get(~p"MSH-9.1") 70 | "ADT" 71 | 72 | ## Parse 73 | 74 | HL7 messages can be fully parsed into lists of sparse maps and strings to provide a compact representation 75 | of the underlying message structure. 76 | 77 | Use `HL7.new!/2` to parse raw HL7 into a struct and `HL7.get_segments/1` to view the parsed data. 78 | 79 | iex> HL7.Examples.wikipedia_sample_hl7() 80 | ...> |> HL7.new!() 81 | ...> |> HL7.get_segments() 82 | ...> |> Enum.at(1) 83 | %{0 => "EVN", 2 => "200605290901", 6 => "200605290900"} 84 | 85 | ## Query 86 | 87 | Use `HL7.get/2` with the `HL7.Path` struct (created using `HL7.sigil_p/2`) to query HL7 data. 88 | 89 | iex> import HL7, only: :sigils 90 | iex> HL7.Examples.nist_immunization_hl7() 91 | ...> |> HL7.new!() 92 | ...> |> HL7.get(~p"RXA-5.2") 93 | "Influenza" 94 | 95 | ## Modify 96 | 97 | Modify the data within messages, segments or repetitions using `HL7.put/3` and `HL7.update/4`. 98 | Use the `HL7.Path` struct (created using `HL7.sigil_p/2`) to specify what to change.
99 | 100 | iex> HL7.Examples.wikipedia_sample_hl7() 101 | ...> |> HL7.new!() 102 | ...> |> HL7.put(~p"OBX[2]-5", "44") 103 | ...> |> HL7.get(~p"OBX[*]-5") 104 | ["1.80", "44"] 105 | 106 | ## Create 107 | 108 | Use `HL7.new/2`, `HL7.new_segment/1` and `HL7.set_segments/2` to build HL7 messages from scratch. 109 | Note: This API is currently being developed and extended. 110 | 111 | ## Files 112 | 113 | The `HL7` module contains utility functions to open file streams of HL7 message content with support for MLLP and standard `:line` storage. 114 | Other formats are somewhat supported by specifying expected prefix and suffix delimiters between messages. 115 | 116 | ## Networking 117 | 118 | A separate library, Elixir-MLLP, exists to manage MLLP connections. MLLP is a simple protocol on top of TCP that is commonly used for sending and receiving HL7 messages. 119 | 120 | # License 121 | 122 | Elixir HL7 source code is released under Apache 2 License. Check the LICENSE file for more information. 
123 | -------------------------------------------------------------------------------- /test/hl7/message_props_test.exs: -------------------------------------------------------------------------------- 1 | defmodule HL7.MessagePropsTest do 2 | use ExUnit.Case 3 | use PropCheck 4 | 5 | property "parses HL7 message and to_strings back the same" do 6 | numtests( 7 | 200, 8 | forall msg <- message() do 9 | msg 10 | |> HL7.Message.new() 11 | |> to_string() == msg 12 | end 13 | ) 14 | end 15 | 16 | property "parses a segment for each line in message" do 17 | forall msg <- message() do 18 | line_count = 19 | msg 20 | |> String.split("\r") 21 | |> length() 22 | 23 | msg 24 | |> HL7.Message.new() 25 | |> (fn m -> m.segments end).() 26 | |> length() == line_count - 1 27 | end 28 | end 29 | 30 | # Helpers 31 | 32 | defp message() do 33 | let separators <- oneof([separators(), separators(6)]) do 34 | let {msh_segment, segments} <- { 35 | msh(separators), 36 | zero_or_more(segment(separators)) 37 | } do 38 | ([msh_segment | segments] ++ [""]) 39 | |> Enum.join("\r") 40 | end 41 | end 42 | end 43 | 44 | defp separators(length \\ 5) do 45 | let seps <- such_that(seps <- list_of(separator(), length), when: unique?(seps)) do 46 | seps |> to_string 47 | end 48 | end 49 | 50 | defp unique?(list) do 51 | list 52 | |> MapSet.new() 53 | |> MapSet.size() == length(list) 54 | end 55 | 56 | defp separator() do 57 | # printable characters excluding capital letters and digits 58 | oneof([ 59 | range(33, 47), 60 | range(58, 64), 61 | range(91, 126) 62 | ]) 63 | end 64 | 65 | defp msh(separators) do 66 | field_separator = String.first(separators) 67 | 68 | let {fields6, message_type, fields4} <- 69 | tuple([ 70 | list_of(field(separators), 6), 71 | message_type(separators), 72 | list_of(field(separators), 4) 73 | ]) do 74 | joined = 75 | fields6 76 | |> Enum.concat([message_type | fields4]) 77 | |> Enum.join(field_separator) 78 | 79 | "MSH#{separators}#{field_separator}#{joined}" 80 | end 81 | 
end 82 | 83 | defp segment(separators) do 84 | field_separator = String.first(separators) 85 | 86 | let {name, strings} <- {segment_name(), fields(separators)} do 87 | "#{name}#{field_separator}#{strings}" 88 | end 89 | end 90 | 91 | defp fields(separators) do 92 | gen_value(separators, 0, field(separators)) 93 | end 94 | 95 | defp field(separators) do 96 | gen_value(separators, 2, repetition(separators)) 97 | end 98 | 99 | defp repetition(separators) do 100 | gen_value(separators, 1, component(separators)) 101 | end 102 | 103 | defp component(separators) do 104 | gen_value(separators, 4, safe_string(separators)) 105 | end 106 | 107 | defp gen_value(separators, separator_index, subgen) do 108 | separator = separators |> String.at(separator_index) 109 | 110 | let strings <- zero_or_more(subgen) do 111 | strings 112 | |> Enum.join(separator) 113 | end 114 | end 115 | 116 | defp message_type(separators) do 117 | component_separator = separators |> String.at(1) 118 | repetition_separator = separators |> String.at(2) 119 | 120 | let {components, repetitions} <- 121 | tuple([ 122 | two_or_more(component(separators)), 123 | zero_or_more(repetition(separators)) 124 | ]) do 125 | first_rep = components |> Enum.join(component_separator) 126 | 127 | [first_rep | repetitions] 128 | |> Enum.join(repetition_separator) 129 | end 130 | end 131 | 132 | defp zero_or_more(gen) do 133 | let count <- range(1, 10) do 134 | let tup <- oneof([tuple(list_of(gen, count)), {}]) do 135 | tup 136 | |> Tuple.to_list() 137 | end 138 | end 139 | end 140 | 141 | defp two_or_more(gen) do 142 | let count <- range(2, 10) do 143 | let tup <- tuple(list_of(gen, count)) do 144 | tup 145 | |> Tuple.to_list() 146 | end 147 | end 148 | end 149 | 150 | defp list_of(value, n), do: 1..n |> Enum.map(fn _ -> value end) 151 | 152 | defp segment_name() do 153 | let tup <- tuple([capital(), capital_or_digit(), capital_or_digit()]) do 154 | tup 155 | |> Tuple.to_list() 156 | |> Enum.join("") 157 | end 158 | end 159 
| 160 | defp capital() do 161 | let char <- range(65, 90) do 162 | [char] |> to_string 163 | end 164 | end 165 | 166 | defp digit() do 167 | let char <- range(48, 57) do 168 | [char] |> to_string 169 | end 170 | end 171 | 172 | defp capital_or_digit() do 173 | oneof([capital(), digit()]) 174 | end 175 | 176 | defp safe_string(separators) do 177 | let chars <- list(range(32, 126)) do 178 | chars 179 | |> to_string() 180 | |> escape_separators(separators) 181 | end 182 | end 183 | 184 | defp escape_separators(str, separators) do 185 | escape = separators |> String.at(3) 186 | 187 | separators 188 | |> String.to_charlist() 189 | |> Enum.map(fn char -> [char] |> to_string end) 190 | |> Enum.zip(~w(F S R E T)) 191 | |> Enum.reduce(str, fn {separator, replacement}, acc -> 192 | String.replace(acc, separator, "#{escape}#{replacement}#{escape}") 193 | end) 194 | end 195 | end 196 | -------------------------------------------------------------------------------- /mix.lock: -------------------------------------------------------------------------------- 1 | %{ 2 | "argparse": {:hex, :argparse, "1.2.4", "a31e7c5d9f8814afcd9b42c1b98c21da6f851f93f2c8c00c107f6201668a0a7d", [:rebar3], [], "hexpm", "ac6fdb7183ea20adeb7eb66e34b21f2e8c4c6925913ee0c0765d339d97009ffe"}, 3 | "benchee": {:hex, :benchee, "1.1.0", "f3a43817209a92a1fade36ef36b86e1052627fd8934a8b937ac9ab3a76c43062", [:mix], [{:deep_merge, "~> 1.0", [hex: :deep_merge, repo: "hexpm", optional: false]}, {:statistex, "~> 1.0", [hex: :statistex, repo: "hexpm", optional: false]}], "hexpm", "7da57d545003165a012b587077f6ba90b89210fd88074ce3c60ce239eb5e6d93"}, 4 | "benchfella": {:hex, :benchfella, "0.3.5", "b2122c234117b3f91ed7b43b6e915e19e1ab216971154acd0a80ce0e9b8c05f5", [:mix], [], "hexpm"}, 5 | "deep_merge": {:hex, :deep_merge, "1.0.0", "b4aa1a0d1acac393bdf38b2291af38cb1d4a52806cf7a4906f718e1feb5ee961", [:mix], [], "hexpm", "ce708e5f094b9cd4e8f2be4f00d2f4250c4095be93f8cd6d018c753894885430"}, 6 | "dialyxir": {:hex, :dialyxir, 
"1.4.1", "a22ed1e7bd3a3e3f197b68d806ef66acb61ee8f57b3ac85fc5d57354c5482a93", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "84b795d6d7796297cca5a3118444b80c7d94f7ce247d49886e7c291e1ae49801"}, 7 | "earmark": {:hex, :earmark, "1.3.1", "73812f447f7a42358d3ba79283cfa3075a7580a3a2ed457616d6517ac3738cb9", [:mix], [], "hexpm", "000aaeff08919e95e7aea13e4af7b2b9734577b3e6a7c50ee31ee88cab6ec4fb"}, 8 | "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"}, 9 | "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, 10 | "erlperf": {:hex, :erlperf, "2.2.0", "9841d71f0aa0ee344ba3fba919563362f7a64a96cc61c4e38f1a28e941162a4c", [:rebar3], [{:argparse, "~> 1.2.4", [hex: :argparse, repo: "hexpm", optional: false]}], "hexpm", "b627ca998b07f78fbb0b7da17c0537ae87730c520e1961c3633f9496b3729973"}, 11 | "ex_doc": {:hex, :ex_doc, "0.38.2", "504d25eef296b4dec3b8e33e810bc8b5344d565998cd83914ffe1b8503737c02", [:mix], [{:earmark_parser, "~> 1.4.44", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "732f2d972e42c116a70802f9898c51b54916e542cc50968ac6980512ec90f42b"}, 12 | "excoveralls": {:hex, :excoveralls, "0.18.0", "b92497e69465dc51bc37a6422226ee690ab437e4c06877e836f1c18daeb35da9", [:mix], [{:castore, "~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: 
false]}], "hexpm", "1109bb911f3cb583401760be49c02cbbd16aed66ea9509fc5479335d284da60b"}, 13 | "jason": {:hex, :jason, "1.4.1", "af1504e35f629ddcdd6addb3513c3853991f694921b1b9368b0bd32beb9f1b63", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "fbb01ecdfd565b56261302f7e1fcc27c4fb8f32d56eab74db621fc154604a7a1"}, 14 | "junit_formatter": {:hex, :junit_formatter, "3.3.1", "c729befb848f1b9571f317d2fefa648e9d4869befc4b2980daca7c1edc468e40", [:mix], [], "hexpm", "761fc5be4b4c15d8ba91a6dafde0b2c2ae6db9da7b8832a55b5a1deb524da72b"}, 15 | "libgraph": {:hex, :libgraph, "0.13.3", "20732b7bafb933dcf7351c479e03076ebd14a85fd3202c67a1c197f4f7c2466b", [:mix], [], "hexpm", "78f2576eef615440b46f10060b1de1c86640441422832052686df53dc3c148c6"}, 16 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"}, 17 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"}, 18 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"}, 19 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"}, 20 | "propcheck": {:hex, :propcheck, "1.4.1", 
"c12908dbe6f572032928548089b34ff9d40672d5d70f1562e3a9e9058d226cc9", [:mix], [{:libgraph, "~> 0.13", [hex: :libgraph, repo: "hexpm", optional: false]}, {:proper, "~> 1.4", [hex: :proper, repo: "hexpm", optional: false]}], "hexpm", "e1b088f574785c3c7e864da16f39082d5599b3aaf89086d3f9be6adb54464b19"}, 21 | "proper": {:hex, :proper, "1.4.0", "89a44b8c39d28bb9b4be8e4d715d534905b325470f2e0ec5e004d12484a79434", [:rebar3], [], "hexpm", "18285842185bd33efbda97d134a5cb5a0884384db36119fee0e3cfa488568cbb"}, 22 | "statistex": {:hex, :statistex, "1.0.0", "f3dc93f3c0c6c92e5f291704cf62b99b553253d7969e9a5fa713e5481cd858a5", [:mix], [], "hexpm", "ff9d8bee7035028ab4742ff52fc80a2aa35cece833cf5319009b52f1b5a86c27"}, 23 | "stream_data": {:hex, :stream_data, "0.4.2", "fa86b78c88ec4eaa482c0891350fcc23f19a79059a687760ddcf8680aac2799b", [:mix], [], "hexpm"}, 24 | } 25 | -------------------------------------------------------------------------------- /lib/hl7/segment.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.Segment do 2 | require Logger 3 | 4 | @moduledoc """ 5 | Filter, parse and modify individual HL7 segments and their fragments. 6 | """ 7 | 8 | @moduledoc deprecated: "Use `HL7` instead" 9 | 10 | @type raw_hl7 :: String.t() | HL7.RawMessage.t() 11 | @type fragment_hl7 :: String.t() | [list() | String.t()] 12 | @type segment_hl7 :: [fragment_hl7()] 13 | @type parsed_hl7 :: [segment_hl7()] | HL7.Message.t() 14 | @type content_hl7 :: raw_hl7() | parsed_hl7() 15 | 16 | @doc ~S""" 17 | Updates content within a parsed HL7 segment, returning a modified segment whose data has been transformed at the given 18 | indices (starting at 1 as with HL7's convention). The `transform` can be either a `string`, `list` or `fn old_data -> new_data`. 
19 | """ 20 | 21 | @spec replace_part( 22 | segment_hl7(), 23 | fragment_hl7() | (fragment_hl7() -> fragment_hl7()), 24 | pos_integer(), 25 | pos_integer() | nil, 26 | pos_integer() | nil, 27 | pos_integer() | nil 28 | ) :: segment_hl7() | String.t() 29 | 30 | def replace_part( 31 | segment, 32 | transform, 33 | field, 34 | repetition \\ nil, 35 | component \\ nil, 36 | subcomponent \\ nil 37 | ) 38 | 39 | def replace_part(segment, transform, field, repetition, component, subcomponent) 40 | when is_integer(field) and is_function(transform, 1) do 41 | indices = to_indices(field, repetition, component, subcomponent) 42 | replace_fragment(segment, indices, transform, true) 43 | end 44 | 45 | def replace_part(segment, transform, field, repetition, component, subcomponent) 46 | when is_integer(field) and (is_binary(transform) or is_list(transform)) do 47 | indices = to_indices(field, repetition, component, subcomponent) 48 | replace_fragment(segment, indices, fn _data -> transform end, true) 49 | end 50 | 51 | @doc false 52 | 53 | # used by HL7.Message to update segments with list-based indices 54 | 55 | @spec replace_fragment(list() | String.t(), list(), function(), boolean()) :: 56 | list() | String.t() 57 | def replace_fragment(data, [], transform, is_field) when is_function(transform, 1) do 58 | transform.(data) |> unwrap_binary_field(is_field) 59 | end 60 | 61 | def replace_fragment(data, [i | remaining_indices], transform, is_field) 62 | when is_binary(data) and is_integer(i) and is_function(transform, 1) do 63 | [data | empty_string_list(i)] 64 | |> List.update_at(0, fn d -> 65 | replace_fragment(d, remaining_indices, transform, false) |> unwrap_binary_field(is_field) 66 | end) 67 | |> Enum.reverse() 68 | end 69 | 70 | def replace_fragment(data, [i | remaining_indices], transform, is_field) 71 | when is_list(data) and is_integer(i) and is_function(transform, 1) do 72 | count = Enum.count(data) 73 | 74 | case i < count do 75 | true -> 76 | List.update_at(data, i, 
fn d -> 77 | replace_fragment(d, remaining_indices, transform, false) 78 | |> unwrap_binary_field(is_field) 79 | end) 80 | 81 | false -> 82 | data 83 | |> Enum.reverse() 84 | |> empty_string_list(i - count + 1) 85 | |> List.update_at(0, fn d -> 86 | replace_fragment(d, remaining_indices, transform, false) 87 | |> unwrap_binary_field(is_field) 88 | end) 89 | |> Enum.reverse() 90 | end 91 | end 92 | 93 | @doc false 94 | def get_part_by_indices(data, []) do 95 | data 96 | end 97 | 98 | def get_part_by_indices(data, [i | remaining_indices]) do 99 | case data do 100 | nil -> 101 | data 102 | 103 | _ when is_binary(data) -> 104 | data 105 | 106 | _ when is_integer(i) and is_list(data) -> 107 | Enum.at(data, i) |> get_part_by_indices(remaining_indices) 108 | end 109 | end 110 | 111 | @doc ~S""" 112 | Extracts content from a parsed HL7 segment, 113 | returning nested data by applying each supplied index in turn. Please note that HL7 indices start at 1. 114 | """ 115 | 116 | @spec get_part( 117 | segment_hl7() | fragment_hl7(), 118 | field :: pos_integer(), 119 | repetition :: pos_integer() | nil, 120 | component :: pos_integer() | nil, 121 | subcomponent :: pos_integer() | nil 122 | ) :: nil | list() | binary() 123 | 124 | def get_part(data, field, repetition \\ nil, component \\ nil, subcomponent \\ nil) 125 | when is_integer(field) and 126 | (is_integer(repetition) or is_nil(repetition)) and 127 | (is_integer(component) or is_nil(component)) and 128 | (is_integer(subcomponent) or is_nil(subcomponent)) do 129 | indices = to_indices(field, repetition, component, subcomponent) 130 | get_part_by_indices(data, indices) 131 | end 132 | 133 | def leftmost_value([]) do 134 | nil 135 | end 136 | 137 | def leftmost_value([h | _]) do 138 | leftmost_value(h) 139 | end 140 | 141 | def leftmost_value(d) do 142 | d 143 | end 144 | 145 | @spec unwrap_binary_field(list() | String.t(), boolean()) :: list() | String.t() 146 | defp unwrap_binary_field([text], _is_field = true) when 
is_binary(text) do 147 | text 148 | end 149 | 150 | defp unwrap_binary_field(data, is_field) when is_boolean(is_field) do 151 | data 152 | end 153 | 154 | @spec empty_string_list(non_neg_integer()) :: [String.t()] 155 | defp empty_string_list(n) when is_integer(n) do 156 | empty_string_list([], n) 157 | end 158 | 159 | @spec empty_string_list([String.t()], non_neg_integer()) :: [String.t()] 160 | defp empty_string_list(list, 0) do 161 | list 162 | end 163 | 164 | defp empty_string_list(list, n) do 165 | empty_string_list(["" | list], n - 1) 166 | end 167 | 168 | defp to_indices(field, repetition, component, subcomponent) do 169 | [field + 1, repetition, component, subcomponent] 170 | |> Enum.take_while(fn i -> i != nil end) 171 | |> Enum.map(fn i -> i - 1 end) 172 | end 173 | end 174 | -------------------------------------------------------------------------------- /lib/hl7/examples.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.Examples do 2 | @moduledoc """ 3 | Functions to provide sample HL7 data which can be used to explore the API. 4 | """ 5 | 6 | @doc """ 7 | Returns a sample HL7 string from [Wikipedia's HL7 article](https://en.wikipedia.org/wiki/Health_Level_7#Version_2_messaging). 8 | 9 | The HL7 version of the message defaults to 2.5, but can be overridden.
10 | """ 11 | 12 | @spec wikipedia_sample_hl7(String.t()) :: String.t() 13 | def wikipedia_sample_hl7(version \\ "2.5") 14 | 15 | def wikipedia_sample_hl7("2.5") do 16 | """ 17 | MSH|^~\\&|MegaReg|XYZHospC|SuperOE|XYZImgCtr|20060529090131-0500||ADT^A01^ADT_A01|01052901|P|2.5 18 | EVN||200605290901||||200605290900 19 | PID|||56782445^^^UAReg^PI||KLEINSAMPLE^BARRY^Q^JR||19620910|M||2028-9^^HL70005^RA99113^^XYZ|260 GOODWIN CREST DRIVE^^BIRMINGHAM^AL^35209^^M~NICKELL’S PICKLES^10000 W 100TH AVE^BIRMINGHAM^AL^35200^^O|||||||0105I30001^^^99DEF^AN 20 | PV1||I|W^389^1^UABH^^^^3||||12345^MORGAN^REX^J^^^MD^0010^UAMC^L||67890^GRAINGER^LUCY^X^^^MD^0010^UAMC^L|MED|||||A0||13579^POTTER^SHERMAN^T^^^MD^0010^UAMC^L|||||||||||||||||||||||||||200605290900 21 | OBX|1|N^K&M|^Body Height||1.80|m^Meter^ISO+|||||F 22 | OBX|2|NM|^Body Weight||79|kg^Kilogram^ISO+|||||F 23 | AL1|1||^ASPIRIN 24 | DG1|1||786.50^CHEST PAIN, UNSPECIFIED^I9|||A 25 | """ 26 | |> String.replace("\n", "\r") 27 | end 28 | 29 | def wikipedia_sample_hl7(version) when is_binary(version) do 30 | wikipedia_sample_hl7() 31 | |> String.replace("2.5", version, global: false) 32 | end 33 | 34 | def wikipedia_sample_hl7_alt_delimiters() do 35 | wikipedia_sample_hl7() 36 | |> String.replace("^", "*") 37 | end 38 | 39 | @spec nist_immunization_hl7() :: String.t() 40 | def nist_immunization_hl7() do 41 | """ 42 | MSH|^~\\&|Test EHR Application|X68||NIST Test Iz Reg|201207010822||VXU^V04^VXU_V04|NIST-IZ-020.00|P|2.5.1|||AL|ER 43 | PID|1||252430^^^MAA^MR||Curry^Qiang^Trystan^^^^L||20090819|M 44 | ORC|RE||IZ-783278^NDA|||||||||57422^RADON^NICHOLAS^^^^^^NDA^L 45 | RXA|0|1|20120814||140^Influenza^CVX|0.5|mL^MilliLiter [SI Volume Units]^UCUM||00^New immunization record^NIP001||||||W1356FE|20121214|SKB^GlaxoSmithKline^MVX|||CP|A 46 | RXR|C28161^Intramuscular^NCIT|RA^Right Arm^HL70163 47 | OBX|1|CE|64994-7^Vaccine funding program eligibility category^LN|1|V03^VFC eligible - Uninsured^HL70064||||||F|||20120701|||VXC40^Eligibility captured 
at the immunization level^CDCPHINVS 48 | OBX|2|CE|30956-7^vaccine type^LN|2|88^Influenza, unspecified formulation^CVX||||||F 49 | OBX|3|TS|29768-9^Date vaccine information statement published^LN|2|20120702||||||F 50 | OBX|4|TS|29769-7^Date vaccine information statement presented^LN|2|20120814||||||F 51 | ORC|RE||IZ-783276^NDA 52 | RXA|0|1|20110214||133^PCV 13^CVX|999|||01^Historical information - source unspecified^NIP001 53 | ORC|RE||IZ-783282^NDA|||||||||57422^RADON^NICHOLAS^^^^^^NDA^L 54 | RXA|0|1|20120814||110^DTaP-Hep B-IPV^CVX|0.5|mL^MilliLiter [SI Volume Units]^UCUM||00^New immunization record^NIP001||||||78HH34I|20121214|SKB^GlaxoSmithKline^MVX|||CP|A 55 | RXR|C28161^Intramuscular^NCIT|LA^Left Arm^HL70163 56 | OBX|1|CE|64994-7^Vaccine funding program eligibility category^LN|1|V03^VFC eligible - Uninsured^HL70064||||||F|||20120701|||VXC40^Eligibility captured at the immunization level^CDCPHINVS 57 | OBX|2|CE|30956-7^vaccine type^LN|2|107^DTaP^CVX||||||F 58 | OBX|3|TS|29768-9^Date vaccine information statement published^LN|2|20070517||||||F 59 | OBX|4|TS|29769-7^Date vaccine information statement presented^LN|2|20120814||||||F 60 | OBX|5|CE|30956-7^vaccine type^LN|3|89^Polio^CVX||||||F 61 | OBX|6|TS|29768-9^Date vaccine information statement published^LN|3|20111108||||||F 62 | OBX|7|TS|29769-7^Date vaccine information statement presented^LN|3|20120814||||||F 63 | OBX|8|CE|30956-7^vaccine type^LN|4|45^Hep B, unspecified formulation^CVX||||||F 64 | OBX|9|TS|29768-9^Date vaccine information statement published^LN|4|20120202||||||F 65 | OBX|10|TS|29769-7^Date vaccine information statement presented^LN|4|20120814||||||F 66 | """ 67 | |> String.replace("\n", "\r") 68 | end 69 | 70 | @spec nist_syndromic_hl7() :: String.t() 71 | def nist_syndromic_hl7() do 72 | """ 73 | MSH|^~\\&||LakeMichMC^9879874000^NPI|||201204020040||ADT^A03^ADT_A03|NIST-SS-003.32|P|2.5.1|||||||||PH_SS-NoAck^SS Sender^2.16.840.1.114222.4.10.3^ISO 74 | 
EVN||201204020030|||||LakeMichMC^9879874000^NPI 75 | PID|1||33333^^^^MR||^^^^^^~^^^^^^S|||F||2106-3^^CDCREC|^^^^53217^^^^55089|||||||||||2186-5^^CDCREC 76 | PV1|1||||||||||||||||||33333_001^^^^VN|||||||||||||||||09||||||||201204012130 77 | DG1|1||0074^Cryptosporidiosis^I9CDX|||F 78 | DG1|2||27651^Dehydration^I9CDX|||F 79 | DG1|3||78791^Diarrhea^I9CDX|||F 80 | OBX|1|CWE|SS003^^PHINQUESTION||261QE0002X^Emergency Care^NUCC||||||F 81 | OBX|2|NM|21612-7^^LN||45|a^^UCUM|||||F 82 | OBX|3|CWE|8661-1^^LN||^^^^^^^^Diarrhea, stomach pain, dehydration||||||F 83 | """ 84 | |> String.replace("\n", "\r") 85 | end 86 | 87 | @spec elr_example() :: String.t() 88 | def elr_example() do 89 | """ 90 | MSH|^~\\&#|EHR LAB^11.11.666.1.111.4.3.2.2.1.321.111^ISO|H Facility FACILITY^Oid^ISO|RCVING APPLICAT^1.11.111.1.1111111.3.1.1111^ISO|RCVING FACILITY^1.11.111.1.1111111.3.1.2222^ISO|20220907145828-0500||ORU^R01^ORU_R01|PHELR.1.45543|D|2.5.1|||||||||PHLabReport-NoAck^HL7^1.11.111.1.1111111.9.11^ISO 91 | SFT|EHR, Inc.^L^^^^EHR&1.3.6.1.4.1.24310&***^XX^^^EHR|5.67|Laboratory Applicati|********||******** 92 | PID|1||A0995614951^^^EHR LAB&1.11.111.1.1111111.4.3.2.2.1.321.111&ISO^MR^Facility&Oid&ISO~A09956149510^^^EHR LAB&1.11.111.1.1111111.4.3.2.2.1.321.111&ISO^SS^Facility&Oid&ISO~A09956149511^^^EHR LAB&1.11.111.1.1111111.4.3.2.2.1.321.111&ISO^PI^Facility&Oid&ISO~A09956149512^^^EHR LAB&1.11.111.1.1111111.4.3.2.2.1.321.111&ISO^AN^Facility&Oid&ISO||TEST^TEST^||19560927|M||2131-1^Other Race^HL70005^AI^ASIAN INDIAN^L^2.5.1^5.67|35544 TEST TEST^Apt. 535^Red Hook^NY^12571||111-111-111|111-111-1111||M|OTH^Other^HL70006|||||4||||||||N|||20160422123800-0500|Facility^Oid^ISO|337915000^Homo sapiens (organism)^SCT^Human^Human^L^1^1 93 | NK1|1|TEST^TEST^J|SPO^Spouse^HL70063^S^SPOUSE^L^2.5.1^5.67|99111 Street^Apt. 
608^AnchoTESTrage^TT^99502|111-111-111||||||||||||||||||||||||||| 94 | PV1|1|I|J.CON^J.CON1^11|C|||TEST^TEST^TEST^L^^^MD|TEST^TEST^TEST^^^^MD|EST^TEST^TEST^^^^MD|MED||||2|||TEST^TEST^TEST^L^^^MD|IN||COMM||||||||||||||||01|||Facility||DI|||| 95 | ORC|RE|09984662^L103312.1^**************************^***|0907:QAX:C00002R.1^L103312.1^***^***|0907:QAX:C00002R^L103312^**************************^***||N|||************|||TEST^TEST^TEST^D^^^MD|||************||************||||************^L^^^^************&**************************&***^XX^^^LAB ID# 1234567|TEST PP^TEST 3, 96 | OBR|1|09984662^L103312.1^**************************^***|0907:QAX:C00002R.1^L103312.1^***^***|2951-2^Sodium [Moles/volume 97 | TQ1|1||||||*******************|*******************|R 98 | OBX|1|SN|2951-2^Sodium [Moles/volume# 99 | SPM|1|^L103312&********&***&***||*********^Blood specimen^SCT^BLOOD^BLOOD^L^**********^5.67|||||||||||||*******************^*******************|******************* 100 | ORC|RE|09984662^L103312.2^**************************^***|0907:QAX:C00002R.2^L103312.2^***^***|0907:QAX:C00002R^L103312^**************************^***||N|||************|||1^TEST^TEST^D^^^MD|||************||************||||************^L^^^^************&**************************&***^XX^^^LAB ID# 1234567|TEST PP^TEST 3, 101 | OBR|2|09984662^L103312.2^**************************^***|0907:QAX:C00002R.2^L103312.2^***^***|IMO0002^Potassium measurement^LN^K^POTASSIUM^L^2.40^5.67|||*******************|*******************||AS|||REASON FOR VISIT||||||09984662||************|*******************|||F|||||||TEST&TEST&TEST&&&&&&********&***&*** 102 | TQ1|1||||||*******************|*******************|R 103 | OBX|1|SN|IMO0002^Potassium measurement^LN^K^POTASSIUM^L^2.40^5.67||=^4.2|mEq/L^mEq/L^L^^^^5.67|3.5-5.0|N^Normal (applies to non-numeric results)^HL70078^N^N^L^2.5.1^5.67|||F|||*******************|||||*******************||||************^L^^^^************&**************************&***^XX^^^LAB ID# 1234567|TEST PP^TEST 3, 104 | 
SPM|1|^L103312&********&***&***||*********^Blood specimen^SCT^BLOOD^BLOOD^L^**********^5.67|||||||||||||*******************^*******************|******************* 105 | """ 106 | |> String.replace("\n", "\r") 107 | end 108 | end 109 | -------------------------------------------------------------------------------- /test/hl7/message_test.exs: -------------------------------------------------------------------------------- 1 | defmodule HL7.MessageTest do 2 | use ExUnit.Case 3 | require Logger 4 | 5 | doctest HL7.Message 6 | 7 | test "Can generate an Invalid Message Header" do 8 | header = %HL7.InvalidHeader{raw: "raw", reason: :unknown} 9 | assert header.raw == "raw" 10 | assert header.reason == :unknown 11 | end 12 | 13 | test "HL7 parse all versions" do 14 | ["2.1", "2.2", "2.3", "2.3.1", "2.4", "2.5", "2.5.1"] 15 | |> Enum.each(fn version -> 16 | make_example_message(version) 17 | make_header_message(version) 18 | end) 19 | end 20 | 21 | test "Create new roundtrip msh from header" do 22 | header = %HL7.Header{} = HL7.Header.new("ADT", "A04", "SAMPLE_ID") 23 | new_msg = HL7.Message.new(header) 24 | msh_from_msg = HL7.Message.to_list(new_msg) |> Enum.at(0) 25 | msh_from_header = HL7.Header.to_msh(header) 26 | 27 | assert msh_from_msg == msh_from_header 28 | assert new_msg.header.message_type == "ADT" 29 | assert new_msg.header.trigger_event == "A04" 30 | assert new_msg.header.message_control_id == "SAMPLE_ID" 31 | end 32 | 33 | test "Can build a Semi-Valid Message with Partial Header" do 34 | header = HL7.Header.new("ADT", "A04", "SAMPLE_ID") |> Map.put(:hl7_version, nil) 35 | new_msg = HL7.Message.new(header) 36 | msh_from_msg = HL7.Message.to_list(new_msg) |> Enum.at(0) 37 | msh_from_header = HL7.Header.to_msh(header) 38 | 39 | assert msh_from_msg == msh_from_header 40 | assert new_msg.header.message_type == "ADT" 41 | assert new_msg.header.trigger_event == "A04" 42 | assert new_msg.header.message_control_id == "SAMPLE_ID" 43 | assert new_msg.header.hl7_version 
== nil 44 | end 45 | 46 | test "Can generate an Invalid Message and Header from an invalid MSH" do 47 | header = %HL7.Header{} = HL7.Header.new("ADT", "A04", "SAMPLE_ID") 48 | new_msg = HL7.Message.new(header) 49 | msh_from_msg = HL7.Message.to_list(new_msg) |> Enum.at(0) 50 | bad_msh = msh_from_msg |> List.replace_at(9, "STUFF") 51 | 52 | bad_text = 53 | HL7.Message.to_list(new_msg) 54 | |> List.replace_at(0, bad_msh) 55 | |> HL7.Message.new() 56 | |> to_string() 57 | 58 | bad_msg = HL7.Message.new(bad_text) 59 | bad_header = bad_msg.header 60 | assert bad_header.reason == :invalid_message_type 61 | end 62 | 63 | test "Example HL7 roundtrips after going from new" do 64 | raw_text = HL7.Examples.wikipedia_sample_hl7() 65 | roundtrip = raw_text |> HL7.Message.new() |> to_string() 66 | assert roundtrip == raw_text 67 | end 68 | 69 | test "Example HL7 roundtrips going from new -- with excess trailing text fragments" do 70 | raw_text = HL7.Examples.wikipedia_sample_hl7() 71 | raw_text_with_garbage = raw_text <> "\rgarbage text" 72 | new_msg = raw_text_with_garbage |> HL7.Message.new() 73 | roundtrip = new_msg |> to_string() 74 | assert roundtrip == raw_text 75 | assert new_msg.fragments == [["garbage text"]] 76 | end 77 | 78 | test "Example HL7 roundtrips after going from raw" do 79 | raw_text = HL7.Examples.wikipedia_sample_hl7() 80 | raw_msg = %HL7.RawMessage{} = HL7.Message.raw(raw_text) 81 | roundtrip = raw_msg |> to_string() 82 | assert roundtrip == raw_text 83 | end 84 | 85 | test "Example HL7 roundtrips after going from raw to new" do 86 | raw_text = HL7.Examples.wikipedia_sample_hl7() 87 | roundtrip = raw_text |> HL7.Message.raw() |> HL7.Message.new() |> to_string() 88 | assert roundtrip == raw_text 89 | end 90 | 91 | test "Example HL7 roundtrips after going from new to raw" do 92 | raw_text = HL7.Examples.wikipedia_sample_hl7() 93 | roundtrip = raw_text |> HL7.Message.new() |> HL7.Message.raw() |> to_string() 94 | assert roundtrip == raw_text 95 | end 96 | 97 
| test "A raw message passed into Message.raw returns itself" do 98 | raw = HL7.Examples.wikipedia_sample_hl7() |> HL7.Message.raw() 99 | assert raw == HL7.Message.raw(raw) 100 | end 101 | 102 | test "A new message passed into Message.new returns itself" do 103 | new = HL7.Examples.wikipedia_sample_hl7() |> HL7.Message.new() 104 | assert new == HL7.Message.new(new) 105 | end 106 | 107 | test "A message with truncation char works" do 108 | hl7 = HL7.Examples.elr_example() 109 | 110 | assert hl7 == 111 | HL7.Examples.elr_example() |> HL7.Message.new() |> HL7.Message.raw() |> to_string() 112 | end 113 | 114 | test "A raw message can return its list of segments" do 115 | segment_count = 116 | HL7.Examples.wikipedia_sample_hl7() 117 | |> HL7.Message.raw() 118 | |> HL7.Message.to_list() 119 | |> Enum.count() 120 | 121 | assert segment_count == 8 122 | end 123 | 124 | test "A fast-path using Parser should return the same list structure as Message" do 125 | # currently, raw to new uses the Message split as opposed to Parser 126 | text = HL7.Examples.wikipedia_sample_hl7() 127 | orig_list = HL7.Message.raw(text) |> HL7.Message.to_list() 128 | parser_list = HL7.Parser.parse(text, nil, false) 129 | assert orig_list == parser_list 130 | parser_list_via_copy = HL7.Parser.parse(text, nil, true) 131 | assert orig_list == parser_list_via_copy 132 | end 133 | 134 | test "A list passed into Message.to_list returns itself" do 135 | segments = HL7.Examples.wikipedia_sample_hl7() |> HL7.Message.raw() |> HL7.Message.to_list() 136 | assert segments == HL7.Message.to_list(segments) 137 | end 138 | 139 | test "A bogus message passed into Message.raw will result in InvalidMessage" do 140 | assert %HL7.InvalidMessage{} = HL7.Message.raw("Bogus message") 141 | end 142 | 143 | test "A bogus message passed into Message.new will result in InvalidMessage" do 144 | assert %HL7.InvalidMessage{} = HL7.Message.new("Bogus message") 145 | end 146 | 147 | test "A badly encoded message passed into 
Message.new will result in a Message without option `validate_string: true`" do 148 | latin1_text = <<220, 105, 178>> 149 | latin1_msg = HL7.Examples.wikipedia_sample_hl7() |> String.replace("A01", latin1_text) 150 | assert %HL7.Message{} = HL7.Message.new(latin1_msg) 151 | end 152 | 153 | test "A badly encoded message passed into Message.new will result in an InvalidMessage with option `validate_string: true`" do 154 | latin1_text = <<220, 105, 178>> 155 | latin1_msg = HL7.Examples.wikipedia_sample_hl7() |> String.replace("A01", latin1_text) 156 | assert %HL7.InvalidMessage{} = HL7.Message.new(latin1_msg, %{validate_string: true}) 157 | end 158 | 159 | test "An incomplete header passed into Message.new will result in InvalidMessage" do 160 | missing_message_type = 161 | HL7.Examples.wikipedia_sample_hl7() |> String.replace("ADT^A01^ADT_A01", "") 162 | 163 | assert %HL7.InvalidMessage{} = HL7.Message.new(missing_message_type) 164 | end 165 | 166 | test "Can search a raw message for a segment name" do 167 | field_count = 168 | HL7.Examples.wikipedia_sample_hl7() 169 | |> HL7.Message.raw() 170 | |> HL7.Message.find("PID") 171 | |> Enum.count() 172 | 173 | assert field_count == 19 174 | end 175 | 176 | test "Can search a raw text for a segment name" do 177 | field_count = 178 | HL7.Examples.wikipedia_sample_hl7() 179 | |> HL7.Message.find("PID") 180 | |> Enum.count() 181 | 182 | assert field_count == 19 183 | end 184 | 185 | test "Can search a full message for a segment name" do 186 | field_count = 187 | HL7.Examples.wikipedia_sample_hl7() 188 | |> HL7.Message.new() 189 | |> HL7.Message.find("PID") 190 | |> Enum.count() 191 | 192 | assert field_count == 19 193 | end 194 | 195 | test "Can search a segment list for a segment name" do 196 | field_count = 197 | HL7.Examples.wikipedia_sample_hl7() 198 | |> HL7.Message.to_list() 199 | |> HL7.Message.find("PID") 200 | |> Enum.count() 201 | 202 | assert field_count == 19 203 | end 204 | 205 | test "Calling get_segments breaks 
a valid message into lists" do 206 | min_parsed = [ 207 | [ 208 | "MSH", 209 | "|", 210 | "^~\\&", 211 | "MegaReg", 212 | "XYZHospC", 213 | "SuperOE", 214 | "XYZImgCtr", 215 | "20060529090131-0500", 216 | "", 217 | [["ADT", "A01", "ADT_A01"]], 218 | "01052901", 219 | "P", 220 | "2.5" 221 | ], 222 | ["EVN", "", "200605290901", "", "", "", "200605290900"], 223 | [ 224 | "PID", 225 | "", 226 | "", 227 | [["56782445", "", "", "UAReg", "PI"]], 228 | "", 229 | [["KLEINSAMPLE", "BARRY", "Q", "JR"]], 230 | "", 231 | "19620910", 232 | "M", 233 | "", 234 | [["2028-9", "", "HL70005", "RA99113", "", "XYZ"]], 235 | [ 236 | ["260 GOODWIN CREST DRIVE", "", "BIRMINGHAM", "AL", "35209", "", "M"], 237 | ["NICKELL’S PICKLES", "10000 W 100TH AVE", "BIRMINGHAM", "AL", "35200", "", "O"] 238 | ], 239 | "", 240 | "", 241 | "", 242 | "", 243 | "", 244 | "", 245 | [["0105I30001", "", "", "99DEF", "AN"]] 246 | ], 247 | [ 248 | "PV1", 249 | "", 250 | "I", 251 | [["W", "389", "1", "UABH", "", "", "", "3"]], 252 | "", 253 | "", 254 | "", 255 | [["12345", "MORGAN", "REX", "J", "", "", "MD", "0010", "UAMC", "L"]], 256 | "", 257 | [["67890", "GRAINGER", "LUCY", "X", "", "", "MD", "0010", "UAMC", "L"]], 258 | "MED", 259 | "", 260 | "", 261 | "", 262 | "", 263 | "A0", 264 | "", 265 | [["13579", "POTTER", "SHERMAN", "T", "", "", "MD", "0010", "UAMC", "L"]], 266 | "", 267 | "", 268 | "", 269 | "", 270 | "", 271 | "", 272 | "", 273 | "", 274 | "", 275 | "", 276 | "", 277 | "", 278 | "", 279 | "", 280 | "", 281 | "", 282 | "", 283 | "", 284 | "", 285 | "", 286 | "", 287 | "", 288 | "", 289 | "", 290 | "", 291 | "", 292 | "200605290900" 293 | ], 294 | [ 295 | "OBX", 296 | "1", 297 | [["N", ["K", "M"]]], 298 | [["", "Body Height"]], 299 | "", 300 | "1.80", 301 | [["m", "Meter", "ISO+"]], 302 | "", 303 | "", 304 | "", 305 | "", 306 | "F" 307 | ], 308 | [ 309 | "OBX", 310 | "2", 311 | "NM", 312 | [["", "Body Weight"]], 313 | "", 314 | "79", 315 | [["kg", "Kilogram", "ISO+"]], 316 | "", 317 | "", 318 | "", 
319 | "", 320 | "F" 321 | ], 322 | ["AL1", "1", "", [["", "ASPIRIN"]]], 323 | ["DG1", "1", "", [["786.50", "CHEST PAIN, UNSPECIFIED", "I9"]], "", "", "A"] 324 | ] 325 | 326 | assert min_parsed == HL7.Examples.wikipedia_sample_hl7() |> HL7.Message.to_list() 327 | end 328 | 329 | defp make_example_message(version) do 330 | raw_text = HL7.Examples.wikipedia_sample_hl7(version) 331 | hl7_msg = HL7.Message.new(raw_text) 332 | header = hl7_msg.header 333 | rebuilt_raw = hl7_msg |> to_string() 334 | assert rebuilt_raw == raw_text 335 | assert header.message_type == "ADT" 336 | assert header.trigger_event == "A01" 337 | assert header.sending_facility == "XYZHospC" 338 | assert header.hl7_version == version 339 | assert header.message_date_time == "20060529090131-0500" 340 | end 341 | 342 | defp make_header_message(version) do 343 | header = %HL7.Header{} = HL7.Header.new("ADT", "A04", "SAMPLE_ID", "P", version) 344 | hl7_msg = HL7.Message.new(header) 345 | hl7_msg_header = hl7_msg.header 346 | 347 | assert hl7_msg_header.message_type == "ADT" 348 | assert hl7_msg_header.trigger_event == "A04" 349 | assert hl7_msg_header.hl7_version == version 350 | end 351 | end 352 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2018 HCA Healthcare 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /lib/hl7/message.ex: -------------------------------------------------------------------------------- 1 | defmodule HL7.Message do 2 | require Logger 3 | 4 | @moduledoc """ 5 | Creates, parses and modifies HL7 messages with a focus on performance. Contains a list of parsed segments and header metadata. 6 | 7 | Use `HL7.Message.new/2` to create an `t:HL7.Message.t/0` struct that contains a fully parsed HL7 message alongside header metadata. 8 | The parsed data is represented as minimally as possible as lists of string and lists. 9 | 10 | 11 | The second argument is an options map supporting the following values: 12 | 13 | `copy: true` -- Will create binary copies while parsing to avoid keeping references. 14 | 15 | `validate_string: true` -- Will generate an `HL7.InvalidMessage` if the source text is not UTF-8 compatible. 16 | """ 17 | 18 | @moduledoc deprecated: "Use `HL7` instead." 
19 | 20 | alias HL7.Path 21 | 22 | @segment_terminator "\r" 23 | 24 | @type t :: %HL7.Message{ 25 | segments: nil | list(), 26 | header: nil | HL7.Header.t() 27 | } 28 | 29 | @type raw_hl7 :: String.t() | HL7.RawMessage.t() 30 | @type fragment_hl7 :: String.t() | [list() | String.t()] 31 | @type segment_hl7 :: [fragment_hl7()] 32 | @type parsed_hl7 :: [segment_hl7()] | HL7.Message.t() 33 | @type content_hl7 :: raw_hl7() | parsed_hl7() 34 | 35 | defstruct segments: nil, 36 | fragments: [], 37 | header: nil, 38 | tag: %{} 39 | 40 | @doc """ 41 | Creates an `HL7.Message` struct containing the raw HL7 text for further processing. It will 42 | also expose basic header information (e.g. encoding characters, message type) for routing. 43 | 44 | Invalid MSH formats will return an `HL7.InvalidMessage` struct. 45 | """ 46 | 47 | @spec raw(content_hl7()) :: HL7.RawMessage.t() | HL7.InvalidMessage.t() 48 | def raw( 49 | <<"MSH", field_separator::binary-size(1), _encoding_characters::binary-size(5), 50 | field_separator::binary-size(1), _::binary>> = raw_text 51 | ) do 52 | parse_raw_hl7(raw_text) 53 | end 54 | 55 | def raw( 56 | <<"MSH", field_separator::binary-size(1), _encoding_characters::binary-size(4), 57 | field_separator::binary-size(1), _::binary>> = raw_text 58 | ) do 59 | parse_raw_hl7(raw_text) 60 | end 61 | 62 | def raw(segments) when is_list(segments) do 63 | [msh | other_segments] = segments 64 | [name, field_separator | msh_tail] = msh 65 | [encoding_characters | _] = msh_tail 66 | msh_without_field_separator = [name | msh_tail] 67 | 68 | [component, repeat, _escape_char, subcomponent | _truncation_char] = 69 | String.graphemes(encoding_characters) 70 | 71 | join_by_character_list = [field_separator, repeat, component, subcomponent] 72 | 73 | raw_text = 74 | join_with_separators( 75 | [msh_without_field_separator | other_segments], 76 | [@segment_terminator | join_by_character_list] 77 | ) <> @segment_terminator 78 | 79 | HL7.Message.raw(raw_text) 80 | end 81 | 
82 | def raw(raw_text) when is_binary(raw_text) do 83 | %HL7.InvalidMessage{ 84 | raw: raw_text, 85 | created_at: DateTime.utc_now(), 86 | reason: :missing_header_or_encoding 87 | } 88 | end 89 | 90 | def raw(%HL7.Message{segments: segments}) do 91 | HL7.Message.raw(segments) 92 | end 93 | 94 | def raw(%HL7.RawMessage{} = raw_msg) do 95 | raw_msg 96 | end 97 | 98 | defp parse_raw_hl7(raw_text) do 99 | header = extract_header(raw_text) 100 | 101 | case header do 102 | %HL7.Header{} -> 103 | %HL7.RawMessage{raw: raw_text, header: header} 104 | 105 | %HL7.InvalidHeader{} -> 106 | %HL7.InvalidMessage{ 107 | raw: raw_text, 108 | created_at: DateTime.utc_now(), 109 | header: header, 110 | reason: :invalid_header 111 | } 112 | end 113 | end 114 | 115 | @doc ~S""" 116 | Creates an `HL7.Message` struct containing parsed segment list data. It will 117 | also expose basic header information (e.g. encoding characters, message type) for routing. 118 | 119 | Pass `copy: true` as the second argument to generate binary copies of all substrings as it parses the message. 120 | 121 | Invalid MSH formats will return an `HL7.InvalidMessage`. 
122 | 123 | ## Examples 124 | 125 | iex> HL7.Examples.wikipedia_sample_hl7() 126 | ...> |> HL7.Message.new() 127 | ...> |> HL7.Query.get_segment_names() 128 | ["MSH", "EVN", "PID", "PV1", "OBX", "OBX", "AL1", "DG1"] 129 | 130 | iex> HL7.Message.new( 131 | ...> "MSH|^~\\&|MegaReg|XYZHospC|SuperOE|XYZImgCtr|" <> 132 | ...> "20060529090131-0500||ADT^A01^ADT_A01|01052901|P|2.5") 133 | ...> |> HL7.Query.get_segment_names() 134 | ["MSH"] 135 | 136 | iex> HL7.Message.new( 137 | ...> [["MSH", "|", "^~\\&", "App", "Facility", "", "", 138 | ...> "20060529090131-0500", "", [["ADT", "A01", "ADT_A01"]], 139 | ...> "01052901", "P", "2.5"]]) 140 | ...> |> HL7.Query.get_segment_names() 141 | ["MSH"] 142 | 143 | """ 144 | @spec new(content_hl7() | HL7.Header.t(), map()) :: HL7.Message.t() | HL7.InvalidMessage.t() 145 | 146 | def new(content, options \\ %{copy: false}) 147 | 148 | def new(%HL7.RawMessage{raw: raw_text}, options) do 149 | new(raw_text, options) 150 | end 151 | 152 | def new(%HL7.Message{} = msg, _options) do 153 | msg 154 | end 155 | 156 | def new(%HL7.InvalidMessage{} = msg, _options) do 157 | msg 158 | end 159 | 160 | def new(%HL7{} = msg, _options) do 161 | HL7.to_list(msg) |> new() |> Map.put(:tag, msg.tags) 162 | end 163 | 164 | def new(<<"MSH|^~\\&", _rest::binary>> = raw_text, options) do 165 | with {:ok, text} <- validate_text(raw_text, options) do 166 | copy = options[:copy] == true 167 | parsed_segments = HL7.Parser.parse(text, nil, copy) 168 | new_from_parsed_segments(text, parsed_segments) 169 | end 170 | end 171 | 172 | def new(<<"MSH|^~\\&#", _rest::binary>> = raw_text, options) do 173 | with {:ok, text} <- validate_text(raw_text, options) do 174 | copy = options[:copy] == true 175 | parsed_segments = HL7.Parser.parse(text, nil, copy) 176 | new_from_parsed_segments(text, parsed_segments) 177 | end 178 | end 179 | 180 | def new( 181 | <<"MSH", field::binary-size(1), _::binary-size(4), field::binary-size(1), _::binary>> = 182 | raw_text, 183 | options 184 
| ) do 185 | with {:ok, text} <- validate_text(raw_text, options) do 186 | copy = options[:copy] == true 187 | separators = HL7.Separators.new(text) 188 | parsed_segments = HL7.Parser.parse(text, separators, copy) 189 | new_from_parsed_segments(text, parsed_segments) 190 | end 191 | end 192 | 193 | def new( 194 | <<"MSH", field::binary-size(1), _::binary-size(5), field::binary-size(1), _::binary>> = 195 | raw_text, 196 | options 197 | ) do 198 | with {:ok, text} <- validate_text(raw_text, options) do 199 | copy = options[:copy] == true 200 | separators = HL7.Separators.new(text) 201 | parsed_segments = HL7.Parser.parse(text, separators, copy) 202 | new_from_parsed_segments(text, parsed_segments) 203 | end 204 | end 205 | 206 | def new(raw_text, _options) when is_binary(raw_text) do 207 | %HL7.InvalidMessage{ 208 | raw: raw_text, 209 | created_at: DateTime.utc_now(), 210 | reason: :missing_header 211 | } 212 | end 213 | 214 | def new(segments, _) when is_list(segments) do 215 | %HL7.Message{segments: segments, header: extract_header(segments)} 216 | end 217 | 218 | def new(%HL7.Header{} = header, _) do 219 | msh = HL7.Header.to_msh(header) 220 | HL7.Message.new([msh]) 221 | end 222 | 223 | @doc """ 224 | Returns a parsed list of segments from an HL7 message or content. 225 | """ 226 | @spec to_list(content_hl7()) :: [list()] 227 | def to_list(msg) when is_list(msg) do 228 | msg 229 | end 230 | 231 | def to_list(%HL7.Message{segments: segments}) do 232 | segments 233 | end 234 | 235 | def to_list(%HL7.InvalidMessage{}) do 236 | raise RuntimeError, "invalid HL7 data" 237 | end 238 | 239 | def to_list(%HL7.RawMessage{} = msg) do 240 | msg |> HL7.Message.new() |> to_list() 241 | end 242 | 243 | def to_list(raw_text) when is_binary(raw_text) do 244 | raw_text |> HL7.Message.new() |> to_list() 245 | end 246 | 247 | @doc """ 248 | Returns the first parsed segment matching `segment_name` from an HL7 message or content. 
249 | """ 250 | 251 | @spec find(content_hl7(), String.t() | non_neg_integer()) :: segment_hl7() | nil 252 | 253 | def find(segments, segment_name) 254 | when is_list(segments) and is_binary(segment_name) do 255 | segments 256 | |> Enum.find(fn segment -> 257 | [h | _] = segment 258 | h == segment_name 259 | end) 260 | end 261 | 262 | def find(%HL7.Message{segments: segments}, segment_name) 263 | when is_list(segments) and is_binary(segment_name) do 264 | segments 265 | |> find(segment_name) 266 | end 267 | 268 | def find(%HL7.RawMessage{} = msg, segment_name) 269 | when is_binary(segment_name) do 270 | msg 271 | |> to_list() 272 | |> find(segment_name) 273 | end 274 | 275 | def find(raw_text, segment_name) 276 | when is_binary(raw_text) and is_binary(segment_name) do 277 | raw_text 278 | |> to_list() 279 | |> find(segment_name) 280 | end 281 | 282 | @doc false 283 | 284 | # utility method for HL7.Query 285 | 286 | @spec update_segments(list(), Path.t(), list() | String.t() | nil | function()) :: 287 | list() 288 | def update_segments(segments, %Path{data: {segment_name, indices}}, transform) do 289 | segments 290 | |> Enum.map(fn 291 | [^segment_name | _] = segment -> 292 | HL7.Segment.replace_fragment(segment, indices, transform, true) 293 | 294 | segment -> 295 | segment 296 | end) 297 | end 298 | 299 | def update_segments(segments, %Path{data: indices}, transform) when is_list(indices) do 300 | segments 301 | |> Enum.map(fn segment -> HL7.Segment.replace_fragment(segment, indices, transform, true) end) 302 | end 303 | 304 | # ----------------- 305 | # Private functions 306 | # ----------------- 307 | 308 | @spec get_raw_msh_segment(String.t()) :: String.t() 309 | defp get_raw_msh_segment(<<"MSH", _::binary>> = raw_text) do 310 | raw_text 311 | |> String.split(@segment_terminator, parts: 2) 312 | |> Enum.at(0) 313 | end 314 | 315 | @spec split_segment_text(String.t(), HL7.Separators.t()) :: list() 316 | defp split_segment_text(<<"MSH", _rest::binary>> = 
raw_text, separators) do 317 | raw_text 318 | |> strip_msh_encoding 319 | |> split_into_fields(separators) 320 | |> add_msh_encoding_fields(separators) 321 | end 322 | 323 | defp split_segment_text(raw_text, separators) do 324 | raw_text |> split_into_fields(separators) 325 | end 326 | 327 | @spec split_into_fields(String.t(), HL7.Separators.t()) :: list() 328 | defp split_into_fields(text, separators) do 329 | text 330 | |> String.split(separators.field) 331 | |> Enum.map(&split_with_text_delimiters(&1, separators)) 332 | end 333 | 334 | @spec split_with_text_delimiters(String.t(), HL7.Separators.t()) :: list() | String.t() 335 | defp split_with_text_delimiters("", _separators) do 336 | "" 337 | end 338 | 339 | defp split_with_text_delimiters(text, separators) do 340 | delimiters = get_delimiters_in_text(text, separators) 341 | text |> split_with_separators(delimiters) 342 | end 343 | 344 | @spec get_delimiters_in_text(String.t(), HL7.Separators.t()) :: list(String.t()) 345 | defp get_delimiters_in_text(text, separators) do 346 | find_delimiters(text, separators.delimiter_check) 347 | end 348 | 349 | @spec find_delimiters(String.t(), list(String.t())) :: list(String.t()) 350 | defp find_delimiters(_text, []) do 351 | [] 352 | end 353 | 354 | defp find_delimiters(text, [split_character | remaining] = delimiters) do 355 | case text |> String.contains?(split_character) do 356 | true -> Enum.reverse(delimiters) 357 | false -> find_delimiters(text, remaining) 358 | end 359 | end 360 | 361 | @spec split_with_separators(String.t(), [String.t()]) :: list() | String.t() 362 | 363 | defp split_with_separators("", _) do 364 | "" 365 | end 366 | 367 | defp split_with_separators(text, [split_character | remaining_characters]) do 368 | text 369 | |> String.split(split_character) 370 | |> Enum.map(&split_with_separators(&1, remaining_characters)) 371 | |> Enum.map(&unwrap_length_one_lists(&1)) 372 | end 373 | 374 | defp split_with_separators(text, []) do 375 | text 376 | end 377 
| 378 | @spec unwrap_length_one_lists(list()) :: list() | String.t() 379 | defp unwrap_length_one_lists(v) do 380 | case v do 381 | [text] when is_binary(text) -> text 382 | _ -> v 383 | end 384 | end 385 | 386 | @spec join_with_separators(String.t() | list(), [String.t()]) :: String.t() 387 | defp join_with_separators(text, separators) when is_binary(text) and is_list(separators) do 388 | text 389 | end 390 | 391 | defp join_with_separators(lists, [split_character | remaining_characters]) do 392 | lists 393 | |> Enum.map(&join_with_separators(&1, remaining_characters)) 394 | |> Enum.join(split_character) 395 | end 396 | 397 | @spec strip_msh_encoding(String.t()) :: String.t() 398 | defp strip_msh_encoding( 399 | <<"MSH", field_separator::binary-size(1), _encoding_chars::binary-size(4), 400 | truncation_char::binary-size(1), field_separator::binary-size(1), msh_rest::binary>> 401 | ) 402 | when truncation_char != field_separator do 403 | "MSH" <> field_separator <> msh_rest 404 | end 405 | 406 | defp strip_msh_encoding(<<"MSH", _encoding_chars::binary-size(5), msh_rest::binary>>) do 407 | "MSH" <> msh_rest 408 | end 409 | 410 | @spec add_msh_encoding_fields([String.t()], HL7.Separators.t()) :: list() 411 | defp add_msh_encoding_fields([msh_name | msh_tail], separators) do 412 | [msh_name, separators.field, separators.encoding_characters | msh_tail] 413 | end 414 | 415 | @spec extract_header(String.t() | list()) :: HL7.Header.t() | HL7.InvalidHeader.t() 416 | defp extract_header(raw_text) when is_binary(raw_text) do 417 | separators = HL7.Separators.new(raw_text) 418 | msh = raw_text |> get_raw_msh_segment() |> split_segment_text(separators) 419 | get_header_from_msh(msh) 420 | end 421 | 422 | defp extract_header(segments) when is_list(segments) do 423 | msh = HL7.Message.find(segments, "MSH") 424 | get_header_from_msh(msh) 425 | end 426 | 427 | defp get_header_from_msh(msh) when is_list(msh) do 428 | destructure( 429 | [ 430 | _segment_type, 431 | field_separator, 
432 | encoding_characters, 433 | sending_application, 434 | sending_facility, 435 | receiving_application, 436 | receiving_facility, 437 | message_date_time, 438 | security, 439 | message_type_and_trigger_event_content, 440 | message_control_id, 441 | processing_id, 442 | hl7_version 443 | ], 444 | msh 445 | ) 446 | 447 | {message_type_valid, message_type_info} = 448 | get_message_type_info(message_type_and_trigger_event_content) 449 | 450 | case message_type_valid do 451 | true -> 452 | {message_type, trigger_event} = message_type_info 453 | 454 | %HL7.Header{ 455 | separators: HL7.Separators.new(field_separator, encoding_characters), 456 | sending_application: sending_application |> leftmost_value(), 457 | sending_facility: sending_facility |> leftmost_value(), 458 | receiving_application: receiving_application |> leftmost_value(), 459 | receiving_facility: receiving_facility |> leftmost_value(), 460 | message_date_time: message_date_time |> leftmost_value(), 461 | message_type: message_type, 462 | trigger_event: trigger_event, 463 | security: security, 464 | message_control_id: message_control_id, 465 | processing_id: processing_id, 466 | hl7_version: hl7_version |> leftmost_value() 467 | } 468 | 469 | false -> 470 | %HL7.InvalidHeader{raw: msh, reason: message_type_info} 471 | end 472 | end 473 | 474 | defp get_header_from_msh(msh) do 475 | %HL7.InvalidHeader{raw: msh, reason: :unknown_reason} 476 | end 477 | 478 | defp get_message_type_info(content) do 479 | case content do 480 | [[m, t | _] | _] -> {true, {m, t}} 481 | <> -> {true, {m, ""}} 482 | _ -> {false, :invalid_message_type} 483 | end 484 | end 485 | 486 | defp leftmost_value([]) do 487 | nil 488 | end 489 | 490 | defp leftmost_value([h | _]) do 491 | leftmost_value(h) 492 | end 493 | 494 | defp leftmost_value(d) do 495 | d 496 | end 497 | 498 | defp new_from_parsed_segments(raw_text, parsed_segments) 499 | when is_binary(raw_text) and is_list(parsed_segments) do 500 | {segments, fragments} = 501 | 
parsed_segments 502 | |> Enum.split_with(fn 503 | [<<_name::binary-size(3)>> | _rest] -> true 504 | _ -> false 505 | end) 506 | 507 | header = get_header_from_msh(List.first(segments)) 508 | 509 | case header do 510 | %HL7.Header{} -> 511 | %HL7.Message{segments: segments, fragments: fragments, header: header} 512 | 513 | _ -> 514 | %HL7.InvalidMessage{ 515 | raw: raw_text, 516 | created_at: DateTime.utc_now(), 517 | header: header, 518 | reason: :invalid_header 519 | } 520 | end 521 | end 522 | 523 | defp validate_text(raw_text, options) do 524 | validate_string = options[:validate_string] == true 525 | 526 | if !validate_string or String.valid?(raw_text) do 527 | {:ok, raw_text} 528 | else 529 | %HL7.InvalidMessage{ 530 | raw: raw_text, 531 | created_at: DateTime.utc_now(), 532 | reason: :invalid_text_encoding 533 | } 534 | end 535 | end 536 | 537 | defimpl String.Chars, for: HL7.Message do 538 | require Logger 539 | 540 | @spec to_string(HL7.Message.t()) :: String.t() 541 | def to_string(%HL7.Message{segments: segments}) do 542 | HL7.Message.raw(segments) |> Map.get(:raw) 543 | end 544 | end 545 | end 546 | -------------------------------------------------------------------------------- /test/hl7/query_test.exs: -------------------------------------------------------------------------------- 1 | defmodule HL7.QueryTest do 2 | use ExUnit.Case 3 | require Logger 4 | doctest HL7.Query 5 | doctest HL7.Message 6 | require HL7.Query 7 | import HL7.Query 8 | 9 | import ExUnit.CaptureIO 10 | 11 | @wiki HL7.Examples.wikipedia_sample_hl7() |> HL7.Message.new() 12 | @nist HL7.Examples.nist_immunization_hl7() |> HL7.Message.new() 13 | # placed here for viewing convenience 14 | def wiki() do 15 | """ 16 | MSH|^~\\&|MegaReg|XYZHospC|SuperOE|XYZImgCtr|20060529090131-0500||ADT^A01^ADT_A01|01052901|P|2.5 17 | EVN||200605290901||||200605290900 18 | PID|||56782445^^^UAReg^PI||KLEINSAMPLE^BARRY^Q^JR||19620910|M||2028-9^^HL70005^RA99113^^XYZ|260 GOODWIN CREST 
  # NOTE(review): the head of `wiki/0` and its first heredoc lines sit on the
  # previous chunk line; the function is reproduced here in full. Raw copy of
  # the Wikipedia sample message, with LF converted to HL7's CR terminator.
  def wiki() do
    """
    MSH|^~\\&|MegaReg|XYZHospC|SuperOE|XYZImgCtr|20060529090131-0500||ADT^A01^ADT_A01|01052901|P|2.5
    EVN||200605290901||||200605290900
    PID|||56782445^^^UAReg^PI||KLEINSAMPLE^BARRY^Q^JR||19620910|M||2028-9^^HL70005^RA99113^^XYZ|260 GOODWIN CREST DRIVE^^BIRMINGHAM^AL^35209^^M~NICKELL’S PICKLES^10000 W 100TH AVE^BIRMINGHAM^AL^35200^^O|||||||0105I30001^^^99DEF^AN
    PV1||I|W^389^1^UABH^^^^3||||12345^MORGAN^REX^J^^^MD^0010^UAMC^L||67890^GRAINGER^LUCY^X^^^MD^0010^UAMC^L|MED|||||A0||13579^POTTER^SHERMAN^T^^^MD^0010^UAMC^L|||||||||||||||||||||||||||200605290900
    OBX|1|N^K&M|^Body Height||1.80|m^Meter^ISO+|||||F
    OBX|2|NM|^Body Weight||79|kg^Kilogram^ISO+|||||F
    AL1|1||^ASPIRIN
    DG1|1||786.50^CHEST PAIN, UNSPECIFIED^I9|||A
    """
    |> String.replace("\n", "\r")
  end

  # --- struct defaults and basic construction ---

  test "sigil_g" do
    assert HL7.Path.new("PID-3.1") == ~p{PID-3.1}
  end

  test "Default query is struct with correct defaults" do
    query = %HL7.Query{}
    assert query.selections == []
    assert query.invalid_message == nil
    assert query.part == nil
  end

  test "Default selection in query is struct with correct defaults" do
    selection = %HL7.Selection{}
    assert selection.segments == []
  end

  test "Default separators equals struct from new" do
    separators = %HL7.Separators{}
    %HL7.Separators{field: field, encoding_characters: encoding_characters} = separators
    assert separators == HL7.Separators.new(field, encoding_characters)
  end

  test "Modified separators equals struct from new" do
    separators = %HL7.Separators{field: "#"}
    %HL7.Separators{field: field, encoding_characters: encoding_characters} = separators
    assert separators == HL7.Separators.new(field, encoding_characters)
  end

  test "Select of query returns itself" do
    query = new(@wiki)
    assert new(query) == query
  end

  test "Select invalid message returns query with embedded invalid message" do
    invalid_msg = HL7.Message.new("invalid content")
    query = new(invalid_msg)
    assert query.invalid_message == invalid_msg
  end

  test "Query back to message" do
    m = new(@wiki) |> to_message() |> to_string
    assert m == wiki()
  end

  test "get all segment names" do
    names = new(@wiki) |> get_segment_names()
    assert names == ["MSH", "EVN", "PID", "PV1", "OBX", "OBX", "AL1", "DG1"]
  end

  # --- selection of segments and segment groups ---

  test "select one simple segment" do
    groups = select(@wiki, "MSH") |> get_segment_groups()
    segments = groups |> List.first()
    segment = segments |> List.first()

    assert Enum.count(groups) == 1
    assert Enum.count(segments) == 1
    assert Enum.at(segment, 4) == "XYZHospC"
  end

  test "select multiple simple segments" do
    groups = select(@wiki, "OBX") |> get_segment_groups()
    segments = groups |> List.first()
    segment = segments |> List.first()

    assert Enum.count(groups) == 2
    assert Enum.count(segments) == 1
    assert Enum.at(segment, 5) == "1.80"
  end

  test "select one segment group" do
    groups = select(@wiki, "OBX AL1 DG1") |> get_segment_groups()
    segments = groups |> List.first()
    segment = segments |> Enum.at(1)

    assert Enum.count(groups) == 1
    assert Enum.count(segments) == 3
    assert HL7.Segment.get_part(segment, 3, 1, 2) == "ASPIRIN"
  end

  test "select one segment group from segments as list data" do
    list_data = HL7.Message.new(@wiki) |> HL7.Message.to_list()
    groups = select(list_data, "OBX AL1 DG1") |> get_segment_groups()
    segments = groups |> List.first()
    segment = segments |> Enum.at(1)

    assert Enum.count(groups) == 1
    assert Enum.count(segments) == 3
    assert HL7.Segment.get_part(segment, 3, 1, 2) == "ASPIRIN"
  end

  test "select one segment group from segments as HL7 Message struct" do
    msg = HL7.Message.new(@wiki)
    groups = select(msg, "OBX AL1 DG1") |> get_segment_groups()
    segments = groups |> List.first()
    segment = segments |> Enum.at(1)

    assert Enum.count(groups) == 1
    assert Enum.count(segments) == 3
    assert HL7.Segment.get_part(segment, 3, 1, 2) == "ASPIRIN"
  end

  test "select segment groups with optional segments" do
    groups = select(@wiki, "OBX [AL1] [DG1]") |> get_segment_groups()
    count = select(@wiki, "OBX [AL1] [DG1]") |> count()
    segments = groups |> Enum.at(1)
    segment = segments |> Enum.at(1)

    assert count == 2
    assert Enum.count(groups) == 2
    assert Enum.count(segments) == 3
    assert HL7.Segment.get_part(segment, 3, 1, 2) == "ASPIRIN"
  end

  test "select NO segments via groups with mismatch" do
    segments = select(@wiki, "OBX DG1") |> get_segment_groups()
    count = select(@wiki, "OBX DG1") |> count()
    assert segments == []
    assert count == 0
  end

  test "select wildcard sections with a leading segment type" do
    segment_names = select(@nist, "ORC*") |> map(fn q -> get_segment_names(q) end)

    assert [
             ["ORC", "RXA", "RXR", "OBX", "OBX", "OBX", "OBX"],
             ["ORC", "RXA"],
             [
               "ORC",
               "RXA",
               "RXR",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX"
             ]
           ] == segment_names
  end

  test "select wildcard sections with a breaking segment type" do
    segment_names = select(@nist, "ORC !ORC") |> map(fn q -> get_segment_names(q) end)

    assert [
             ["ORC", "RXA", "RXR", "OBX", "OBX", "OBX", "OBX"],
             ["ORC", "RXA"],
             [
               "ORC",
               "RXA",
               "RXR",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX",
               "OBX"
             ]
           ] == segment_names
  end

  # --- extraction of parts via paths ---

  test "extract a segment field from the first segment" do
    message = new(@wiki)
    part = find_first(message, ~p"3")
    assert part == "MegaReg"

    assert part == get_part(message, "3")
  end

  test "extract a segment field from the first named segment" do
    part = new(@wiki) |> find_first(~p"OBX-2")
    assert part == [["N", ["K", "M"]]]
  end

  test "extract part of a segment repetition from the first named segment" do
    query = new(@wiki)
    part = find_first(query, ~p"PID-11[2].3")
    assert part == "BIRMINGHAM"
  end

  test "extract a segment component from the first named segment" do
    part = new(@wiki) |> find_first(~p"OBX-2.1")
    assert part == "N"
  end

  test "extract a segment subcomponent from the first named segment" do
    part = new(@wiki) |> find_first(~p"OBX-2.2.2")
    assert part == "M"
  end

  test "extract multiple segment parts at once" do
    query = new(@wiki)
    part = find_all(query, ~p"OBX-6.2")
    assert part == ["Meter", "Kilogram"]

    assert part == get_parts(query, "OBX-6.2")
  end

  test "extract a segment value from the first sub-selected segment" do
    part = new(@wiki) |> select("PID") |> find_first(~p"11[1].5")
    assert part == "35209"
  end

  test "extract multiple segment values from the sub-selected segments" do
    parts = new(@wiki) |> select("OBX") |> find_all(~p"1")
    assert parts == ["1", "2"]
  end

  # --- associating data with selections ---

  test "move data from parent to child selections" do
    value =
      HL7.Examples.nist_immunization_hl7()
      |> select("ORC RXA RXR {[OBX]}")
      |> data(fn q -> %{order_num: find_first(q, ~p"ORC-3.1")} end)
      |> select("OBX")
      |> update(~p"6", fn q -> get_datum(q, :order_num) end)
      |> root()
      |> find_first(~p"OBX-6")

    assert value == "IZ-783278"
  end

  test "block overwriting index data" do
    value =
      HL7.Examples.nist_immunization_hl7()
      |> select("ORC [RXA] [RXR] [{OBX}]")
      |> data(fn _q -> %{index: "not an index"} end)
      |> select("OBX")
      |> update(~p"6", fn q -> get_datum(q, :index) end)
      |> root()
      |> find_first(~p"OBX-6")

    assert value == 1
  end

  test "overwrite existing non-index data" do
    value =
      HL7.Examples.nist_immunization_hl7()
      |> select("ORC [RXA] [RXR] [{OBX}]")
      |> data(fn _q -> %{some_key: "not an index"} end)
      |> select("OBX")
      |> data(fn _q -> %{some_key: "overwritten"} end)
      |> update(~p"6", fn q -> get_datum(q, :some_key) end)
      |> root()
      |> find_first(~p"OBX-6")

    assert value == "overwritten"
  end

  test "associate data with invalid selections" do
    value =
      HL7.Examples.nist_immunization_hl7()
      |> select("ZZZ")
      |> data(fn _q -> %{index: "not an index"} end)
      |> update(~p"6", fn q -> get_datum(q, :index) end)
      |> root()
      |> find_first(~p"ZZZ-6")

    assert value == nil
  end

  test "get data from empty selection" do
    value =
      HL7.Examples.nist_immunization_hl7()
      |> select("ZZZ")
      |> get_datum(:order_num)

    assert value == nil
  end

  test "replace data from empty selection" do
    value =
      HL7.Examples.nist_immunization_hl7()
      |> select("ZZZ")
      |> update(~p"3", "no selections to replace")
      |> to_string()

    assert value == HL7.Examples.nist_immunization_hl7()
  end

  test "number set ids using query" do
    values =
      HL7.Examples.nist_immunization_hl7()
      |> select("OBX")
      |> number_set_ids()
      |> find_all(~p"1")

    assert values == ["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14"]
  end

  # --- filtering selections ---

  test "filter segment type by name" do
    segment_names = select(@wiki, "OBX [AL1] [DG1]") |> filter("OBX") |> get_segment_names()

    assert segment_names == ["OBX", "OBX"]
  end

  test "filter a list of segment types" do
    segment_names =
      select(@wiki, "OBX [AL1] [DG1]") |> filter(["OBX", "DG1"]) |> get_segment_names()

    assert segment_names == ["OBX", "OBX", "DG1"]
  end

  test "filter with a query function" do
    filter_func = fn q ->
      p = q |> find_first(~p"6.3")
      p == "ISO+"
    end

    segment_names = new(@wiki) |> filter(filter_func) |> get_segment_names()
    assert segment_names == ["OBX", "OBX"]
  end

  test "filter with a query function from raw text" do
    filter_func = fn q ->
      p = q |> find_first(~p"6.3")
      p == "ISO+"
    end

    segment_names = @wiki |> filter(filter_func) |> get_segment_names()
    assert segment_names == ["OBX", "OBX"]
  end

  test "filter segments from raw text" do
    segment_names = @nist |> filter("PID") |> get_segment_names()
    assert segment_names == ["PID"]
  end

  test "filter segments through a sub-select" do
    segment_names =
      select(@nist, "ORC {RXA} {RXR} [{OBX}]")
      |> select("OBX")
      |> filter("OBX")
      |> get_segment_names()

    assert segment_names == [
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX",
             "OBX"
           ]
  end

  test "filter segments through multiple sub-selects" do
    segment_names =
      select(@nist, "ORC {RXA} {RXR} [{OBX}]")
      |> select("ORC RXA")
      |> select("RXA")
      |> filter("RXA")
      |> get_segment_names()

    assert segment_names == ["RXA", "RXA"]
  end

  test "filter segments through invalid selections of a sub-select with a function" do
    segment_names =
      select(@nist, "ORC {RXA} {RXR} [{OBX}]")
      |> select("ZZZ")
      |> filter(fn q ->
        rem(get_index(q), 2) == 0
      end)
      |> get_segment_names()

    assert segment_names == []
  end

  # --- rejecting selections ---

  test "reject segments by name" do
    segment_names = select(@wiki, "OBX [AL1] [DG1]") |> reject("OBX") |> get_segment_names()

    assert segment_names == ["AL1", "DG1"]
  end

  test "reject segments by name from raw text" do
    segment_names = @wiki |> reject("OBX") |> get_segment_names()

    assert segment_names == ["MSH", "EVN", "PID", "PV1", "AL1", "DG1"]
  end

  test "reject segments by name from list data" do
    segment_names =
      select(@wiki, "OBX [AL1] [DG1]")
      |> get_segments()
      |> reject("OBX")
      |> get_segment_names()

    assert segment_names == ["AL1", "DG1"]
  end

  test "reject segments a list of segment types" do
    segment_names =
      select(@wiki, "OBX [AL1] [DG1]") |> reject(["OBX", "DG1"]) |> get_segment_names()

    assert segment_names == ["AL1"]
  end

  test "reject segments a list of segment types from raw text" do
    segment_names = @wiki |> reject(["OBX", "DG1"]) |> get_segment_names()

    assert segment_names == ["MSH", "EVN", "PID", "PV1", "AL1"]
  end

  test "reject segments a list of segment types from list data" do
    segment_names =
      select(@wiki, "OBX [AL1] [DG1]")
      |> get_segments()
      |> reject(["OBX", "DG1"])
      |> get_segment_names()

    assert segment_names == ["AL1"]
  end

  test "reject with a query function" do
    filter_func = fn q ->
      p = q |> find_first(~p"1")
      p != "1"
    end

    segment_names = new(@wiki) |> reject(filter_func) |> get_segment_names()
    assert segment_names == ["OBX", "AL1", "DG1"]
  end

  test "reject with a query function from raw text" do
    filter_func = fn q ->
      p = q |> find_first(~p"1")
      p != "1"
    end

    segment_names = @wiki |> reject(filter_func) |> get_segment_names()
    assert segment_names == ["OBX", "AL1", "DG1"]
  end

  test "reject segments through invalid selections of a sub-select with a function" do
    segment_names =
      select(@nist, "ORC {RXA} {RXR} [{OBX}]")
      |> select("ZZZ")
      |> reject(fn q ->
        rem(get_index(q), 2) == 0
      end)
      |> get_segment_names()

    assert segment_names == []
  end

  # --- appending, prepending and replacing content ---

  test "append a segment" do
    segment = ["ZZZ", "1", "sleep"]
    segment_names = select(@wiki, "OBX [AL1] [DG1]") |> append(segment) |> get_segment_names()
    assert segment_names == ["OBX", "ZZZ", "OBX", "AL1", "DG1", "ZZZ"]
  end

  test "append multiple segments" do
    segments = [["ZZ1", "1", "sleep"], ["ZZ2", "2", "more sleep"]]
    segment_names = select(@wiki, "OBX [AL1] [DG1]") |> append(segments) |> get_segment_names()
    assert segment_names == ["OBX", "ZZ1", "ZZ2", "OBX", "AL1", "DG1", "ZZ1", "ZZ2"]
  end

  test "prepend a segment" do
    segment = ["ZZZ", "1", "sleep"]
    segment_names = select(@wiki, "OBX [AL1] [DG1]") |> prepend(segment) |> get_segment_names()
    assert segment_names == ["ZZZ", "OBX", "ZZZ", "OBX", "AL1", "DG1"]
  end

  test "prepend multiple segments" do
    segments = [["ZZ1", "1", "sleep"], ["ZZ2", "2", "more sleep"]]
    segment_names = select(@wiki, "OBX [AL1] [DG1]") |> prepend(segments) |> get_segment_names()
    assert segment_names == ["ZZ1", "ZZ2", "OBX", "ZZ1", "ZZ2", "OBX", "AL1", "DG1"]
  end

  test "inject and retrieve replacements that build empty array data between elements while preserving content" do
    query = new(@wiki) |> update(~p"PID-5.2.3", fn q -> q.part <> " PHD" end)
    assert query |> find_first(~p"PID-5.2.3") == "BARRY PHD"
    assert query |> find_first(~p"PID-5.1") == "KLEINSAMPLE"

    query = new(@wiki) |> replace_parts("PID-5.2.3", fn q -> q.part <> " PHD" end)
    assert query |> find_first(~p"PID-5.2.3") == "BARRY PHD"
    assert query |> find_first(~p"PID-5.1") == "KLEINSAMPLE"
  end

  test "inject list content with a replace_parts" do
    query = new(@wiki) |> update(~p"PID-5.2", fn q -> [q.part, "PHD"] end)
    assert query |> find_first(~p"PID-5.2") == ["BARRY", "PHD"]
    assert query |> find_first(~p"PID-5.1") == "KLEINSAMPLE"
  end

  test "inject and retrieve replacements beyond the original segment field count" do
    query = new(@wiki) |> update(~p"AL1-5[2].3.4", "MODIFIED")
    assert query |> find_first(~p"AL1-5[2].3.4") == "MODIFIED"
    assert query |> find_first(~p"AL1-5[2].3.3") == ""
    assert query |> find_first(~p"AL1-5[2].2") == ""
    assert query |> find_first(~p"AL1-5[1]") == ""
  end

  test "reject z segments" do
    segments = [["ZZZ", "1", "sleep"], ["ZZZ", "2", "sleep more"]]

    segment_names =
      select(@wiki, "OBX [AL1] [DG1]")
      |> append(segments)
      |> reject_z_segments()
      |> get_segment_names()

    assert segment_names == ["OBX", "OBX", "AL1", "DG1"]
  end

  # NOTE(review): pre-existing disabled test, preserved verbatim.
  # test "delete selections with a function" do
  #   segment_names =
  #     select(@nist, "ORC {RXA} {RXR} [{OBX}]")
  #     |> select("OBX")
  #     |> delete(fn q ->
  #       rem(get_index(q), 2) == 0
  #     end)
  #     |> get_segment_names()
  #
  #   assert segment_names == ["OBX", "OBX", "OBX", "OBX", "OBX", "OBX", "OBX"]
  # end

  test "replace selections with a function" do
    segment_names =
      select(@nist, "ORC {RXA} {RXR} [{OBX}]")
      |> select("OBX")
      |> replace(fn q -> [["ZZZ", get_index(q), "sleep"]] end)
      |> get_segment_names()

    assert segment_names == [
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ",
             "ZZZ"
           ]
  end

  test "replace invalid selections with a function" do
    segment_names =
      select(@nist, "ORC {RXA} {RXR} [{OBX}]")
      |> select("ZZZ")
      |> replace(fn q -> [["ZZZ", get_index(q), "sleep"]] end)
      |> get_segment_names()

    assert segment_names == []
  end

  # --- roundtripping and output ---

  test "Example HL7 roundtrips after going from raw to select back to raw" do
    raw_text = HL7.Examples.wikipedia_sample_hl7()
    roundtrip = raw_text |> HL7.Message.new() |> HL7.Query.new() |> to_string()
    assert roundtrip == raw_text
  end

  test "Can output query selections to console" do
    raw_text = HL7.Examples.wikipedia_sample_hl7()
    output = capture_io(fn -> raw_text |> new() |> to_console() end)
    assert String.length(output) == 1500
  end

  test "Filter segments and map selections" do
    result =
      HL7.Examples.nist_immunization_hl7()
      |> select("ORC [RXA] [RXR] {OBX}")
      |> filter(fn q -> find_first(q, ~p"3.2") == "vaccine type" end)
      |> map(fn q -> find_all(q, ~p"5.2") end)

    assert result == [
             ["Influenza, unspecified formulation"],
             ["DTaP", "Polio", "Hep B, unspecified formulation"]
           ]
  end

  test "filter selections with a function" do
    segment_names =
      HL7.Examples.nist_immunization_hl7()
      |> select("OBX")
      |> select(fn q -> find_first(q, ~p"1") != "1" end)
      |> delete()
      |> root()
      |> get_segment_names()

    assert segment_names == [
             "MSH",
             "PID",
             "ORC",
             "RXA",
             "RXR",
             "OBX",
             "ORC",
             "RXA",
             "ORC",
             "RXA",
             "RXR",
             "OBX"
           ]
  end

  test "filter invalid selections with a function" do
    msg =
      HL7.Examples.nist_immunization_hl7()
      |> select("ZZZ")
      |> select(fn q -> find_first(q, ~p"1") != "1" end)
      |> delete()
      |> to_string()

    assert msg == HL7.Examples.nist_immunization_hl7()
  end

  test "reject invalid selections with a function" do
    msg =
      HL7.Examples.nist_immunization_hl7()
      |> select("ZZZ")
      |> select(fn q -> find_first(q, ~p"1") == "1" end)
      |> delete()
      |> to_string()

    assert msg == HL7.Examples.nist_immunization_hl7()
  end
end

# ---------------------------------------------------------------------------
# Next file in this chunk: lib/hl7/path_parser.ex (generated module header;
# the @doc heredoc continues on the following chunk line).
# ---------------------------------------------------------------------------

# Generated from parsec_source/path_parser.ex.exs, do not edit.
# Generated at 2024-04-25 18:02:03Z.

defmodule HL7.PathParser do
  @moduledoc false

  @doc """
  Parses the given `binary` as parse.

  Returns `{:ok, [token], rest, context, position, byte_offset}` or
  `{:error, reason, rest, context, line, byte_offset}` where `position`
  describes the location of the parse (start position) as `{line, offset_to_start_of_line}`.

  To column where the error occurred can be inferred from `byte_offset - offset_to_start_of_line`.

  ## Options

    * `:byte_offset` - the byte offset for the whole binary, defaults to 0
    * `:line` - the line and the byte offset into that line, defaults to `{1, byte_offset}`
    * `:context` - the initial context value.
      It will be converted to a map

  """
  # NOTE(review): generated NimbleParsec output — "do not edit" per the file
  # header. Several `<<...>>` binary patterns below were mangled to `<>` by the
  # text extraction that produced this chunk; they are preserved as-is and must
  # be restored by regenerating from parsec_source/path_parser.ex.exs rather
  # than hand-edited. The final `defp` is truncated at the chunk boundary.
  @spec parse(binary, keyword) ::
          {:ok, [term], rest, context, line, byte_offset}
          | {:error, reason, rest, context, line, byte_offset}
        when line: {pos_integer, byte_offset},
             byte_offset: pos_integer,
             rest: binary,
             reason: String.t(),
             context: map
  def parse(binary, opts \\ []) when is_binary(binary) do
    context = Map.new(Keyword.get(opts, :context, []))
    byte_offset = Keyword.get(opts, :byte_offset, 0)

    line =
      case Keyword.get(opts, :line, 1) do
        {_, _} = line -> line
        line -> {line, byte_offset}
      end

    case parse__0(binary, [], [], context, line, byte_offset) do
      {:ok, acc, rest, context, line, offset} ->
        {:ok, :lists.reverse(acc), rest, context, line, offset}

      {:error, _, _, _, _, _} = error ->
        error
    end
  end

  defp parse__0(rest, acc, stack, context, line, offset) do
    parse__4(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp parse__2(rest, acc, [_, previous_acc | stack], context, line, offset) do
    parse__1(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp parse__3(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    parse__2(rest, [], stack, context, line, offset)
  end

  defp parse__4(rest, acc, stack, context, line, offset) do
    parse__5(rest, [], [acc | stack], context, line, offset)
  end

  defp parse__5(rest, acc, stack, context, line, offset) do
    parse__6(rest, [], [acc | stack], context, line, offset)
  end

  # Matches one uppercase letter (A-Z) — binary pattern mangled in extraction.
  defp parse__6(<>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 65 and x0 <= 90 do
    parse__7(rest, [<>] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__6(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    parse__3(rest, acc, stack, context, line, offset)
  end

  # Matches one digit (0-9) or uppercase letter — patterns mangled as above.
  defp parse__7(<>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    parse__8(rest, [<>] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__7(<>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 65 and x0 <= 90 do
    parse__8(rest, [<>] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__7(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    parse__3(rest, acc, stack, context, line, offset)
  end

  defp parse__8(<>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    parse__9(rest, [<>] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__8(<>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 65 and x0 <= 90 do
    parse__9(rest, [<>] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__8(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    parse__3(rest, acc, stack, context, line, offset)
  end

  defp parse__9(rest, user_acc, [acc | stack], context, line, offset) do
    _ = user_acc

    parse__10(
      rest,
      [Enum.join(:lists.reverse(user_acc), "")] ++ acc,
      stack,
      context,
      line,
      offset
    )
  end

  defp parse__10(rest, user_acc, [acc | stack], context, line, offset) do
    _ = user_acc
    parse__11(rest, [segment: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset)
  end

  defp parse__11(rest, acc, stack, context, line, offset) do
    parse__12(rest, [], [acc | stack], context, line, offset)
  end

  defp parse__12(rest, acc, stack, context, line, offset) do
    parse__16(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp parse__14(rest, acc, [_, previous_acc | stack], context, line, offset) do
    parse__13(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp parse__15(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    parse__14(rest, [], stack, context, line, offset)
  end

  defp parse__16(<<"[", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    parse__17(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__16(rest, acc, stack, context, line, offset) do
    parse__15(rest, acc, stack, context, line, offset)
  end

  defp parse__17(rest, acc, stack, context, line, offset) do
    parse__22(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset)
  end

  defp parse__19(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do
    parse__20(rest, ["*"] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__19(rest, _acc, stack, context, line, offset) do
    [_, acc | stack] = stack
    parse__15(rest, acc, stack, context, line, offset)
  end

  defp parse__20(rest, acc, [_, previous_acc | stack], context, line, offset) do
    parse__18(rest, acc ++ previous_acc, stack, context, line, offset)
  end

  defp parse__21(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do
    parse__19(rest, [], stack, context, line, offset)
  end

  defp parse__22(rest, acc, stack, context, line, offset) do
    parse__23(rest, [], [acc | stack], context, line, offset)
  end

  defp parse__23(<>, acc, stack, context, comb__line, comb__offset)
       when x0 >= 48 and x0 <= 57 do
    parse__24(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1)
  end

  defp parse__23(rest, _acc, stack, context, line, offset) do
    [acc | stack] = stack
    parse__21(rest, acc, stack, context, line, offset)
  end

  # NOTE(review): chunk ends here, mid-definition.
  defp
parse__24(<>, acc, stack, context, comb__line, comb__offset) 188 | when x0 >= 48 and x0 <= 57 do 189 | parse__26(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1) 190 | end 191 | 192 | defp parse__24(rest, acc, stack, context, line, offset) do 193 | parse__25(rest, acc, stack, context, line, offset) 194 | end 195 | 196 | defp parse__26(rest, acc, stack, context, line, offset) do 197 | parse__24(rest, acc, stack, context, line, offset) 198 | end 199 | 200 | defp parse__25(rest, user_acc, [acc | stack], context, line, offset) do 201 | _ = user_acc 202 | 203 | parse__27( 204 | rest, 205 | ( 206 | [head | tail] = :lists.reverse(user_acc) 207 | [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)] 208 | ) ++ acc, 209 | stack, 210 | context, 211 | line, 212 | offset 213 | ) 214 | end 215 | 216 | defp parse__27(rest, acc, [_, previous_acc | stack], context, line, offset) do 217 | parse__18(rest, acc ++ previous_acc, stack, context, line, offset) 218 | end 219 | 220 | defp parse__18(<<"]", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 221 | parse__28(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1) 222 | end 223 | 224 | defp parse__18(rest, acc, stack, context, line, offset) do 225 | parse__15(rest, acc, stack, context, line, offset) 226 | end 227 | 228 | defp parse__28(rest, acc, [_, previous_acc | stack], context, line, offset) do 229 | parse__13(rest, acc ++ previous_acc, stack, context, line, offset) 230 | end 231 | 232 | defp parse__13(rest, user_acc, [acc | stack], context, line, offset) do 233 | _ = user_acc 234 | 235 | parse__29( 236 | rest, 237 | [segment_number: :lists.reverse(user_acc)] ++ acc, 238 | stack, 239 | context, 240 | line, 241 | offset 242 | ) 243 | end 244 | 245 | defp parse__29(rest, acc, [_, previous_acc | stack], context, line, offset) do 246 | parse__1(rest, acc ++ previous_acc, stack, context, line, offset) 247 | end 248 | 249 | defp parse__1(rest, acc, stack, context, line, offset) do 
250 | parse__33(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 251 | end 252 | 253 | defp parse__31(rest, acc, [_, previous_acc | stack], context, line, offset) do 254 | parse__30(rest, acc ++ previous_acc, stack, context, line, offset) 255 | end 256 | 257 | defp parse__32(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 258 | parse__31(rest, [], stack, context, line, offset) 259 | end 260 | 261 | defp parse__33(rest, acc, stack, context, line, offset) do 262 | parse__34(rest, [], [acc | stack], context, line, offset) 263 | end 264 | 265 | defp parse__34(<<"-", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 266 | parse__35(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1) 267 | end 268 | 269 | defp parse__34(<>, acc, stack, context, comb__line, comb__offset) do 270 | parse__35(rest, [] ++ acc, stack, context, comb__line, comb__offset) 271 | end 272 | 273 | defp parse__35(rest, acc, stack, context, line, offset) do 274 | parse__40(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 275 | end 276 | 277 | defp parse__37(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 278 | parse__38(rest, ["*"] ++ acc, stack, context, comb__line, comb__offset + 1) 279 | end 280 | 281 | defp parse__37(rest, _acc, stack, context, line, offset) do 282 | [_, _, acc | stack] = stack 283 | parse__32(rest, acc, stack, context, line, offset) 284 | end 285 | 286 | defp parse__38(rest, acc, [_, previous_acc | stack], context, line, offset) do 287 | parse__36(rest, acc ++ previous_acc, stack, context, line, offset) 288 | end 289 | 290 | defp parse__39(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 291 | parse__37(rest, [], stack, context, line, offset) 292 | end 293 | 294 | defp parse__40(rest, acc, stack, context, line, offset) do 295 | parse__41(rest, [], [acc | stack], context, line, offset) 296 | end 297 | 298 | defp parse__41(<>, acc, stack, 
context, comb__line, comb__offset) 299 | when x0 >= 48 and x0 <= 57 do 300 | parse__42(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1) 301 | end 302 | 303 | defp parse__41(rest, _acc, stack, context, line, offset) do 304 | [acc | stack] = stack 305 | parse__39(rest, acc, stack, context, line, offset) 306 | end 307 | 308 | defp parse__42(<>, acc, stack, context, comb__line, comb__offset) 309 | when x0 >= 48 and x0 <= 57 do 310 | parse__44(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1) 311 | end 312 | 313 | defp parse__42(rest, acc, stack, context, line, offset) do 314 | parse__43(rest, acc, stack, context, line, offset) 315 | end 316 | 317 | defp parse__44(rest, acc, stack, context, line, offset) do 318 | parse__42(rest, acc, stack, context, line, offset) 319 | end 320 | 321 | defp parse__43(rest, user_acc, [acc | stack], context, line, offset) do 322 | _ = user_acc 323 | 324 | parse__45( 325 | rest, 326 | ( 327 | [head | tail] = :lists.reverse(user_acc) 328 | [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)] 329 | ) ++ acc, 330 | stack, 331 | context, 332 | line, 333 | offset 334 | ) 335 | end 336 | 337 | defp parse__45(rest, acc, [_, previous_acc | stack], context, line, offset) do 338 | parse__36(rest, acc ++ previous_acc, stack, context, line, offset) 339 | end 340 | 341 | defp parse__36(rest, user_acc, [acc | stack], context, line, offset) do 342 | _ = user_acc 343 | parse__46(rest, [field: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset) 344 | end 345 | 346 | defp parse__46(rest, acc, [_, previous_acc | stack], context, line, offset) do 347 | parse__30(rest, acc ++ previous_acc, stack, context, line, offset) 348 | end 349 | 350 | defp parse__30(rest, acc, stack, context, line, offset) do 351 | parse__50(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 352 | end 353 | 354 | defp parse__50(rest, acc, stack, context, line, offset) do 355 | parse__51(rest, [], [acc | 
stack], context, line, offset) 356 | end 357 | 358 | defp parse__51(rest, acc, stack, context, line, offset) do 359 | parse__55(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 360 | end 361 | 362 | defp parse__53(rest, acc, [_, previous_acc | stack], context, line, offset) do 363 | parse__52(rest, acc ++ previous_acc, stack, context, line, offset) 364 | end 365 | 366 | defp parse__54(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 367 | parse__53(rest, [], stack, context, line, offset) 368 | end 369 | 370 | defp parse__55(<<"[", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 371 | parse__56(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1) 372 | end 373 | 374 | defp parse__55(rest, acc, stack, context, line, offset) do 375 | parse__54(rest, acc, stack, context, line, offset) 376 | end 377 | 378 | defp parse__56(rest, acc, stack, context, line, offset) do 379 | parse__61(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 380 | end 381 | 382 | defp parse__58(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 383 | parse__59(rest, ["*"] ++ acc, stack, context, comb__line, comb__offset + 1) 384 | end 385 | 386 | defp parse__58(rest, _acc, stack, context, line, offset) do 387 | [_, acc | stack] = stack 388 | parse__54(rest, acc, stack, context, line, offset) 389 | end 390 | 391 | defp parse__59(rest, acc, [_, previous_acc | stack], context, line, offset) do 392 | parse__57(rest, acc ++ previous_acc, stack, context, line, offset) 393 | end 394 | 395 | defp parse__60(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 396 | parse__58(rest, [], stack, context, line, offset) 397 | end 398 | 399 | defp parse__61(rest, acc, stack, context, line, offset) do 400 | parse__62(rest, [], [acc | stack], context, line, offset) 401 | end 402 | 403 | defp parse__62(<>, acc, stack, context, comb__line, comb__offset) 404 | when x0 >= 48 and x0 <= 57 
do 405 | parse__63(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1) 406 | end 407 | 408 | defp parse__62(rest, _acc, stack, context, line, offset) do 409 | [acc | stack] = stack 410 | parse__60(rest, acc, stack, context, line, offset) 411 | end 412 | 413 | defp parse__63(<>, acc, stack, context, comb__line, comb__offset) 414 | when x0 >= 48 and x0 <= 57 do 415 | parse__65(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1) 416 | end 417 | 418 | defp parse__63(rest, acc, stack, context, line, offset) do 419 | parse__64(rest, acc, stack, context, line, offset) 420 | end 421 | 422 | defp parse__65(rest, acc, stack, context, line, offset) do 423 | parse__63(rest, acc, stack, context, line, offset) 424 | end 425 | 426 | defp parse__64(rest, user_acc, [acc | stack], context, line, offset) do 427 | _ = user_acc 428 | 429 | parse__66( 430 | rest, 431 | ( 432 | [head | tail] = :lists.reverse(user_acc) 433 | [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)] 434 | ) ++ acc, 435 | stack, 436 | context, 437 | line, 438 | offset 439 | ) 440 | end 441 | 442 | defp parse__66(rest, acc, [_, previous_acc | stack], context, line, offset) do 443 | parse__57(rest, acc ++ previous_acc, stack, context, line, offset) 444 | end 445 | 446 | defp parse__57(<<"]", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 447 | parse__67(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1) 448 | end 449 | 450 | defp parse__57(rest, acc, stack, context, line, offset) do 451 | parse__54(rest, acc, stack, context, line, offset) 452 | end 453 | 454 | defp parse__67(rest, acc, [_, previous_acc | stack], context, line, offset) do 455 | parse__52(rest, acc ++ previous_acc, stack, context, line, offset) 456 | end 457 | 458 | defp parse__52(rest, user_acc, [acc | stack], context, line, offset) do 459 | _ = user_acc 460 | parse__68(rest, [repetition: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset) 461 | end 462 | 463 | defp 
parse__68(rest, acc, [_, previous_acc | stack], context, line, offset) do 464 | parse__47(rest, acc ++ previous_acc, stack, context, line, offset) 465 | end 466 | 467 | defp parse__47(rest, acc, stack, context, line, offset) do 468 | parse__72(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 469 | end 470 | 471 | defp parse__70(rest, acc, [_, previous_acc | stack], context, line, offset) do 472 | parse__69(rest, acc ++ previous_acc, stack, context, line, offset) 473 | end 474 | 475 | defp parse__71(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 476 | parse__70(rest, [], stack, context, line, offset) 477 | end 478 | 479 | defp parse__72(<<".", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 480 | parse__73(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1) 481 | end 482 | 483 | defp parse__72(rest, acc, stack, context, line, offset) do 484 | parse__71(rest, acc, stack, context, line, offset) 485 | end 486 | 487 | defp parse__73(rest, acc, stack, context, line, offset) do 488 | parse__74(rest, [], [acc | stack], context, line, offset) 489 | end 490 | 491 | defp parse__74(rest, acc, stack, context, line, offset) do 492 | parse__79(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 493 | end 494 | 495 | defp parse__76(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 496 | parse__77(rest, ["*"] ++ acc, stack, context, comb__line, comb__offset + 1) 497 | end 498 | 499 | defp parse__76(rest, _acc, stack, context, line, offset) do 500 | [_, _, acc | stack] = stack 501 | parse__71(rest, acc, stack, context, line, offset) 502 | end 503 | 504 | defp parse__77(rest, acc, [_, previous_acc | stack], context, line, offset) do 505 | parse__75(rest, acc ++ previous_acc, stack, context, line, offset) 506 | end 507 | 508 | defp parse__78(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 509 | parse__76(rest, [], stack, context, line, offset) 510 
| end 511 | 512 | defp parse__79(rest, acc, stack, context, line, offset) do 513 | parse__80(rest, [], [acc | stack], context, line, offset) 514 | end 515 | 516 | defp parse__80(<>, acc, stack, context, comb__line, comb__offset) 517 | when x0 >= 48 and x0 <= 57 do 518 | parse__81(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1) 519 | end 520 | 521 | defp parse__80(rest, _acc, stack, context, line, offset) do 522 | [acc | stack] = stack 523 | parse__78(rest, acc, stack, context, line, offset) 524 | end 525 | 526 | defp parse__81(<>, acc, stack, context, comb__line, comb__offset) 527 | when x0 >= 48 and x0 <= 57 do 528 | parse__83(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1) 529 | end 530 | 531 | defp parse__81(rest, acc, stack, context, line, offset) do 532 | parse__82(rest, acc, stack, context, line, offset) 533 | end 534 | 535 | defp parse__83(rest, acc, stack, context, line, offset) do 536 | parse__81(rest, acc, stack, context, line, offset) 537 | end 538 | 539 | defp parse__82(rest, user_acc, [acc | stack], context, line, offset) do 540 | _ = user_acc 541 | 542 | parse__84( 543 | rest, 544 | ( 545 | [head | tail] = :lists.reverse(user_acc) 546 | [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)] 547 | ) ++ acc, 548 | stack, 549 | context, 550 | line, 551 | offset 552 | ) 553 | end 554 | 555 | defp parse__84(rest, acc, [_, previous_acc | stack], context, line, offset) do 556 | parse__75(rest, acc ++ previous_acc, stack, context, line, offset) 557 | end 558 | 559 | defp parse__75(rest, user_acc, [acc | stack], context, line, offset) do 560 | _ = user_acc 561 | parse__85(rest, [component: :lists.reverse(user_acc)] ++ acc, stack, context, line, offset) 562 | end 563 | 564 | defp parse__85(rest, acc, stack, context, line, offset) do 565 | parse__89(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 566 | end 567 | 568 | defp parse__87(rest, acc, [_, previous_acc | stack], context, line, 
offset) do 569 | parse__86(rest, acc ++ previous_acc, stack, context, line, offset) 570 | end 571 | 572 | defp parse__88(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 573 | parse__87(rest, [], stack, context, line, offset) 574 | end 575 | 576 | defp parse__89(<<".", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 577 | parse__90(rest, [] ++ acc, stack, context, comb__line, comb__offset + 1) 578 | end 579 | 580 | defp parse__89(rest, acc, stack, context, line, offset) do 581 | parse__88(rest, acc, stack, context, line, offset) 582 | end 583 | 584 | defp parse__90(rest, acc, stack, context, line, offset) do 585 | parse__91(rest, [], [acc | stack], context, line, offset) 586 | end 587 | 588 | defp parse__91(rest, acc, stack, context, line, offset) do 589 | parse__96(rest, [], [{rest, context, line, offset}, acc | stack], context, line, offset) 590 | end 591 | 592 | defp parse__93(<<"*", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 593 | parse__94(rest, ["*"] ++ acc, stack, context, comb__line, comb__offset + 1) 594 | end 595 | 596 | defp parse__93(rest, _acc, stack, context, line, offset) do 597 | [_, _, acc | stack] = stack 598 | parse__88(rest, acc, stack, context, line, offset) 599 | end 600 | 601 | defp parse__94(rest, acc, [_, previous_acc | stack], context, line, offset) do 602 | parse__92(rest, acc ++ previous_acc, stack, context, line, offset) 603 | end 604 | 605 | defp parse__95(_, _, [{rest, context, line, offset} | _] = stack, _, _, _) do 606 | parse__93(rest, [], stack, context, line, offset) 607 | end 608 | 609 | defp parse__96(rest, acc, stack, context, line, offset) do 610 | parse__97(rest, [], [acc | stack], context, line, offset) 611 | end 612 | 613 | defp parse__97(<>, acc, stack, context, comb__line, comb__offset) 614 | when x0 >= 48 and x0 <= 57 do 615 | parse__98(rest, [x0 - 48] ++ acc, stack, context, comb__line, comb__offset + 1) 616 | end 617 | 618 | defp parse__97(rest, _acc, stack, 
context, line, offset) do 619 | [acc | stack] = stack 620 | parse__95(rest, acc, stack, context, line, offset) 621 | end 622 | 623 | defp parse__98(<>, acc, stack, context, comb__line, comb__offset) 624 | when x0 >= 48 and x0 <= 57 do 625 | parse__100(rest, [x0] ++ acc, stack, context, comb__line, comb__offset + 1) 626 | end 627 | 628 | defp parse__98(rest, acc, stack, context, line, offset) do 629 | parse__99(rest, acc, stack, context, line, offset) 630 | end 631 | 632 | defp parse__100(rest, acc, stack, context, line, offset) do 633 | parse__98(rest, acc, stack, context, line, offset) 634 | end 635 | 636 | defp parse__99(rest, user_acc, [acc | stack], context, line, offset) do 637 | _ = user_acc 638 | 639 | parse__101( 640 | rest, 641 | ( 642 | [head | tail] = :lists.reverse(user_acc) 643 | [:lists.foldl(fn x, acc -> x - 48 + acc * 10 end, head, tail)] 644 | ) ++ acc, 645 | stack, 646 | context, 647 | line, 648 | offset 649 | ) 650 | end 651 | 652 | defp parse__101(rest, acc, [_, previous_acc | stack], context, line, offset) do 653 | parse__92(rest, acc ++ previous_acc, stack, context, line, offset) 654 | end 655 | 656 | defp parse__92(rest, user_acc, [acc | stack], context, line, offset) do 657 | _ = user_acc 658 | 659 | parse__102( 660 | rest, 661 | [subcomponent: :lists.reverse(user_acc)] ++ acc, 662 | stack, 663 | context, 664 | line, 665 | offset 666 | ) 667 | end 668 | 669 | defp parse__102(rest, acc, [_, previous_acc | stack], context, line, offset) do 670 | parse__86(rest, acc ++ previous_acc, stack, context, line, offset) 671 | end 672 | 673 | defp parse__86(rest, acc, [_, previous_acc | stack], context, line, offset) do 674 | parse__69(rest, acc ++ previous_acc, stack, context, line, offset) 675 | end 676 | 677 | defp parse__69(<<"!", rest::binary>>, acc, stack, context, comb__line, comb__offset) do 678 | parse__103(rest, [truncate: [true]] ++ acc, stack, context, comb__line, comb__offset + 1) 679 | end 680 | 681 | defp parse__69(<>, acc, stack, 
context, comb__line, comb__offset) do 682 | parse__103(rest, [] ++ acc, stack, context, comb__line, comb__offset) 683 | end 684 | 685 | defp parse__103(<<""::binary>>, acc, stack, context, comb__line, comb__offset) do 686 | parse__104("", [] ++ acc, stack, context, comb__line, comb__offset) 687 | end 688 | 689 | defp parse__103(rest, _acc, _stack, context, line, offset) do 690 | {:error, "expected end of string", rest, context, line, offset} 691 | end 692 | 693 | defp parse__104(rest, acc, _stack, context, line, offset) do 694 | {:ok, acc, rest, context, line, offset} 695 | end 696 | end 697 | -------------------------------------------------------------------------------- /test/hl7_test.exs: -------------------------------------------------------------------------------- 1 | defmodule HL7Test do 2 | use ExUnit.Case 3 | 4 | import HL7.TempFileCase 5 | use HL7.TempFileCase 6 | 7 | # ^K - VT (Vertical Tab) - 0x0B 8 | @sb "\v" 9 | # ^\ - FS (File Separator) 10 | @eb <<0x1C>> 11 | # ^M - CR (Carriage Return) - 0x0D 12 | @cr "\r" 13 | @ending @eb <> @cr 14 | 15 | @wiki_text HL7.Examples.wikipedia_sample_hl7() 16 | 17 | doctest HL7 18 | import HL7 19 | 20 | describe "HL7.get/2" do 21 | test "can get segment as map" do 22 | segment_maps = @wiki_text |> new!() 23 | pid = get(segment_maps, ~p"PID") 24 | assert match?(%{0 => "PID"}, pid) 25 | end 26 | 27 | test "can get data from a segment as map using partial path from segment" do 28 | pid = @wiki_text |> new!() |> get(~p"PID") 29 | assert "PI" == get(pid, ~p"3.5") 30 | end 31 | 32 | test "can get data from a segment as map using partial from repetition" do 33 | rep = @wiki_text |> new!() |> get(~p"PID-3") 34 | assert "PI" == get(rep, ~p".5") 35 | end 36 | 37 | test "can get field data if run against full HL7" do 38 | msg = @wiki_text |> new!() 39 | assert ["SuperOE", "", "KLEINSAMPLE", "", "1.80", "79", nil, ""] == get(msg, ~p"5!") 40 | end 41 | 42 | test "can get filter data by segment if run against selection of 
segments" do 43 | segments = @wiki_text |> new!() |> get_segments() 44 | assert ["1.80", "79"] == get(segments, ~p"OBX[*]-5!") 45 | end 46 | 47 | test "can get field data if run against selection of segments" do 48 | segments = @wiki_text |> new!() |> get_segments() |> Enum.take(3) 49 | assert ["SuperOE", "", "KLEINSAMPLE"] == get(segments, ~p"5!") 50 | end 51 | 52 | test "can get component data if run against selection of segments" do 53 | segments = @wiki_text |> new!() |> get_segments() 54 | assert [nil, nil, nil, nil, "K", nil, nil, nil] == get(segments, ~p"2.2!") 55 | end 56 | 57 | test "can raise if partial path for repetition is run against full segment" do 58 | pid = @wiki_text |> new!() |> get(~p"PID") 59 | assert_raise RuntimeError, fn -> get(pid, ~p".5") end 60 | end 61 | 62 | test "can get no segment as nil" do 63 | segment_maps = @wiki_text |> new!() 64 | result = get(segment_maps, ~p"ZZZ") 65 | assert is_nil(result) 66 | end 67 | 68 | test "can get multiple segments as list of maps" do 69 | segment_maps = @wiki_text |> new!() 70 | result = get(segment_maps, ~p"OBX[*]") 71 | assert match?([%{0 => "OBX"}, %{0 => "OBX"}], result) 72 | end 73 | 74 | test "can get lack of multiple segments as empty list" do 75 | segment_maps = @wiki_text |> new!() 76 | result = get(segment_maps, ~p"ZZZ[*]") 77 | assert [] == result 78 | end 79 | 80 | test "can get field as map (of 1st default repetition)" do 81 | segment_maps = @wiki_text |> new!() 82 | result = get(segment_maps, ~p"PID-11") 83 | 84 | assert %{ 85 | 1 => "260 GOODWIN CREST DRIVE", 86 | 3 => "BIRMINGHAM", 87 | 4 => "AL", 88 | 5 => "35209", 89 | 7 => "M" 90 | } == result 91 | end 92 | 93 | test "can get missing field as nil" do 94 | segment_maps = @wiki_text |> new!() 95 | assert nil == get(segment_maps, ~p"PID-25") 96 | end 97 | 98 | test "can get missing field as nil with an exclamation path" do 99 | segment_maps = @wiki_text |> new!() 100 | assert nil == get(segment_maps, ~p"PID-25!") 101 | end 102 | 103 
| test "can get repetition as map" do 104 | segment_maps = @wiki_text |> new!() 105 | result = get(segment_maps, ~p"PID-11[2]") 106 | 107 | assert %{ 108 | 1 => "NICKELL’S PICKLES", 109 | 2 => "10000 W 100TH AVE", 110 | 3 => "BIRMINGHAM", 111 | 4 => "AL", 112 | 5 => "35200", 113 | 7 => "O" 114 | } == result 115 | end 116 | 117 | test "can get all repetitions as list of maps" do 118 | segment_maps = @wiki_text |> new!() 119 | result = get(segment_maps, ~p"PID-11[*]") 120 | 121 | assert [ 122 | %{ 123 | 1 => "260 GOODWIN CREST DRIVE", 124 | 3 => "BIRMINGHAM", 125 | 4 => "AL", 126 | 5 => "35209", 127 | 7 => "M" 128 | }, 129 | %{ 130 | 1 => "NICKELL’S PICKLES", 131 | 2 => "10000 W 100TH AVE", 132 | 3 => "BIRMINGHAM", 133 | 4 => "AL", 134 | 5 => "35200", 135 | 7 => "O" 136 | } 137 | ] == result 138 | end 139 | 140 | test "can get all repetitions when field contains only a string" do 141 | assert ["M"] == @wiki_text |> new!() |> get(~p"PID-8[*]") 142 | end 143 | 144 | test "can get first repetitions or field as the same value when it contains only a string" do 145 | assert "M" = @wiki_text |> new!() |> get(~p"PID-8[1]") 146 | assert "M" = @wiki_text |> new!() |> get(~p"PID-8") 147 | end 148 | 149 | test "can get across a list of repetitions" do 150 | reps = @wiki_text |> new!() |> get(~p"PID-11[*]") 151 | assert ["35209", "35200"] == get(reps, ~p".5") 152 | end 153 | 154 | test "can get nothing across an empty list of repetitions" do 155 | assert [] == get([], ~p".5") 156 | end 157 | 158 | test "can get in a repetition" do 159 | rep = @wiki_text |> new!() |> get(~p"PID-11[2]") 160 | assert "35200" == get(rep, ~p".5") 161 | end 162 | 163 | test "can raise if getting a larger path directly against a repetition" do 164 | rep = @wiki_text |> new!() |> get(~p"PID-11[2]") 165 | assert_raise RuntimeError, fn -> get(rep, ~p"2.2") end 166 | end 167 | 168 | test "can get nil for missing component" do 169 | assert nil == @wiki_text |> new!() |> get(~p"PID-8.2") 170 | end 171 | 172 
| test "can get components in all repetitions as list of values" do 173 | segment_maps = @wiki_text |> new!() 174 | result = get(segment_maps, ~p"PID-11[*].5") 175 | assert ["35209", "35200"] == result 176 | end 177 | 178 | test "can get components in all repetitions for all segments as a nested list of values" do 179 | segment_maps = @wiki_text |> new!() 180 | result = get(segment_maps, ~p"PID[*]-11[*].5") 181 | assert [["35209", "35200"]] == result 182 | end 183 | 184 | test "can get components in one repetition" do 185 | segment_maps = @wiki_text |> new!() 186 | result = get(segment_maps, ~p"PID-11[2].5") 187 | assert "35200" == result 188 | end 189 | 190 | test "can get fields in multiple segments as list of values" do 191 | segment_maps = @wiki_text |> new!() 192 | result = get(segment_maps, ~p"OBX[*]-5") 193 | assert ["1.80", "79"] == result 194 | end 195 | 196 | test "can get components in multiple segments as list of values" do 197 | segment_maps = @wiki_text |> new!() 198 | result = get(segment_maps, ~p"OBX[*]-3.2") 199 | assert ["Body Height", "Body Weight"] == result 200 | end 201 | 202 | test "can get truncated results to return first position at any level" do 203 | segment_maps = @wiki_text |> new!() 204 | result = get(segment_maps, ~p"OBX[*]-2!") 205 | assert ["N", "NM"] == result 206 | end 207 | 208 | test "can get subcomponent values" do 209 | segment_maps = @wiki_text |> new!() 210 | result = get(segment_maps, ~p"OBX-2.2.1") 211 | assert "K" == result 212 | end 213 | 214 | test "can get from within specific segment numbers" do 215 | segment_maps = @wiki_text |> new!() 216 | result = get(segment_maps, ~p"OBX[2]-6.2") 217 | assert "Kilogram" == result 218 | end 219 | 220 | test "can return nil for missing values" do 221 | segment_maps = @wiki_text |> new!() 222 | result = get(segment_maps, ~p"OBX[2]-2.2.1") 223 | assert nil == result 224 | end 225 | 226 | test "can return nils for missing values in a list of returns" do 227 | segment_maps = 
@wiki_text |> new!()

      # `[*]` yields one entry per matching OBX segment; the second OBX has no
      # 2.2.1 value, so it surfaces as nil rather than being dropped.
      result = get(segment_maps, ~p"OBX[*]-2.2.1")
      assert ["K", nil] == result
    end

    test "can get all message segments as maps" do
      result = @wiki_text |> new!() |> get_segments()

      # Segments are sparse integer-keyed maps: key 0 is the segment name,
      # higher keys are fields; nested maps hold repetitions/components.
      # Empty trailing positions are simply absent from the maps.
      assert [
               %{
                 0 => "MSH",
                 1 => "|",
                 2 => "^~\\&",
                 3 => "MegaReg",
                 4 => "XYZHospC",
                 5 => "SuperOE",
                 6 => "XYZImgCtr",
                 7 => "20060529090131-0500",
                 9 => %{1 => %{1 => "ADT", 2 => "A01", 3 => "ADT_A01"}},
                 10 => "01052901",
                 11 => "P",
                 12 => "2.5"
               },
               %{0 => "EVN", 2 => "200605290901", 6 => "200605290900"},
               %{
                 0 => "PID",
                 3 => %{1 => %{1 => "56782445", 4 => "UAReg", 5 => "PI"}},
                 5 => %{1 => %{1 => "KLEINSAMPLE", 2 => "BARRY", 3 => "Q", 4 => "JR"}},
                 7 => "19620910",
                 8 => "M",
                 10 => %{1 => %{1 => "2028-9", 3 => "HL70005", 4 => "RA99113", 6 => "XYZ"}},
                 11 => %{
                   1 => %{
                     1 => "260 GOODWIN CREST DRIVE",
                     3 => "BIRMINGHAM",
                     4 => "AL",
                     5 => "35209",
                     7 => "M"
                   },
                   2 => %{
                     1 => "NICKELL’S PICKLES",
                     2 => "10000 W 100TH AVE",
                     3 => "BIRMINGHAM",
                     4 => "AL",
                     5 => "35200",
                     7 => "O"
                   }
                 },
                 18 => %{1 => %{1 => "0105I30001", 4 => "99DEF", 5 => "AN"}}
               },
               %{
                 0 => "PV1",
                 2 => "I",
                 3 => %{1 => %{1 => "W", 2 => "389", 3 => "1", 4 => "UABH", 8 => "3"}},
                 7 => %{
                   1 => %{
                     1 => "12345",
                     2 => "MORGAN",
                     3 => "REX",
                     4 => "J",
                     7 => "MD",
                     8 => "0010",
                     9 => "UAMC",
                     10 => "L"
                   }
                 },
                 9 => %{
                   1 => %{
                     1 => "67890",
                     2 => "GRAINGER",
                     3 => "LUCY",
                     4 => "X",
                     7 => "MD",
                     8 => "0010",
                     9 => "UAMC",
                     10 => "L"
                   }
                 },
                 10 => "MED",
                 15 => "A0",
                 17 => %{
                   1 => %{
                     1 => "13579",
                     2 => "POTTER",
                     3 => "SHERMAN",
                     4 => "T",
                     7 => "MD",
                     8 => "0010",
                     9 => "UAMC",
                     10 => "L"
                   }
                 },
                 44 => "200605290900"
               },
               %{
                 0 => "OBX",
                 1 => "1",
                 2 => %{1 => %{1 => "N", 2 => %{1 => "K", 2 => "M"}}},
                 3 => %{1 => %{2 => "Body Height"}},
                 5 => "1.80",
                 6 => %{1 => %{1 => "m", 2 => "Meter", 3 => "ISO+"}},
                 11 => "F"
               },
               %{
                 0 => "OBX",
                 1 => "2",
                 2 => "NM",
                 3 => %{1 => %{2 => "Body Weight"}},
                 5 => "79",
                 6 => %{1 => %{1 => "kg", 2 => "Kilogram", 3 => "ISO+"}},
                 11 => "F"
               },
               %{0 => "AL1", 1 => "1", 3 => %{1 => %{2 => "ASPIRIN"}}},
               %{
                 0 => "DG1",
                 1 => "1",
                 3 => %{1 => %{1 => "786.50", 2 => "CHEST PAIN, UNSPECIFIED", 3 => "I9"}},
                 6 => "A"
               }
             ] ==
               result
    end

    test "can set all message segments as maps" do
      hl7 = @wiki_text |> new!()
      segments = hl7 |> get_segments() |> Enum.take(2)
      updated_hl7 = set_segments(hl7, segments)
      updated_segments = get_segments(updated_hl7)

      # Only the two retained segments (MSH and EVN) survive the round trip.
      assert [
               %{
                 0 => "MSH",
                 1 => "|",
                 2 => "^~\\&",
                 3 => "MegaReg",
                 4 => "XYZHospC",
                 5 => "SuperOE",
                 6 => "XYZImgCtr",
                 7 => "20060529090131-0500",
                 9 => %{1 => %{1 => "ADT", 2 => "A01", 3 => "ADT_A01"}},
                 10 => "01052901",
                 11 => "P",
                 12 => "2.5"
               },
               %{0 => "EVN", 2 => "200605290901", 6 => "200605290900"}
             ] == updated_segments
    end
  end

  # Writing values at every path depth (segment, field, repetition, component,
  # subcomponent), with both full paths and partial paths applied to
  # already-extracted fragments.
  describe "HL7.put/2" do
    test "can put segment data as list" do
      msg = @wiki_text |> new!() |> put(~p"PID", ["PID", "1", "", "STUFF"])
      # The empty string at index 2 is normalized away in the map form.
      assert %{0 => "PID", 1 => "1", 3 => "STUFF"} == get(msg, ~p"PID")
    end

    test "can put segment data as map" do
      map = %{0 => "PID", 1 => "1", 3 => "STUFF"}
      msg = @wiki_text |> new!() |> put(~p"PID", map)
      assert map == get(msg, ~p"PID")
    end

    test "can put field data as string" do
      msg = @wiki_text |> new!() |> put(~p"PID-8", "F")
      assert "F" == get(msg, ~p"PID-8")
    end

    test "can put field data as string while keeping simple internal format unchanged" do
      msg = @wiki_text |> new!() |> put(~p"PID-8", "F")
      # Reach into the raw segment list: a simple field stays a plain string,
      # not a nested repetition/component map.
      pid = msg.segments |> Enum.at(2)
      assert "F" == pid[8]
    end

    test "can fail to put field data when segment does not exist" do
      msg = @wiki_text |> new!()
      assert_raise RuntimeError, fn -> put(msg, ~p"ZZZ-8", "F") end
    end

    test "can put field data as string overwriting map" do
      msg = @wiki_text |> new!() |> put(~p"PID-3", "SOME_ID")
      assert "SOME_ID" == get(msg, ~p"PID-3")
    end

    test "can put field data as string overwriting map of repetitions" do
      # A scalar written through `[*]` collapses all repetitions to one value.
      msg = @wiki_text |> new!() |> put(~p"PID-11[*]", "SOME_ID")
      assert ["SOME_ID"] == get(msg, ~p"PID-11[*]")
    end

    test "can put field data as repetition map overwriting repetitions" do
      msg =
        @wiki_text
        |> new!()
        |> put(~p"PID-11[*]", %{1 => "SOME_ID", 2 => "OTHER_ID", 3 => "FINAL_ID"})

      assert ["SOME_ID", "OTHER_ID", "FINAL_ID"] == get(msg, ~p"PID-11[*]")
    end

    test "can put field data as a list overwriting repetitions" do
      msg =
        @wiki_text
        |> new!()
        |> put(~p"PID-11[*]", ["SOME_ID", "OTHER_ID", "FINAL_ID"])

      assert ["SOME_ID", "OTHER_ID", "FINAL_ID"] == get(msg, ~p"PID-11[*]")
    end

    test "can put field data as map overwriting map" do
      map = %{1 => "123", 4 => "XX", 5 => "BB"}
      msg = @wiki_text |> new!() |> put(~p"PID-3", map)
      assert map == get(msg, ~p"PID-3")
    end

    test "can put repetition data as map overwriting map" do
      map = %{1 => "123", 4 => "XX", 5 => "BB"}
      msg = @wiki_text |> new!() |> put(~p"PID-3[1]", map)
      assert map == get(msg, ~p"PID-3[1]")
    end

    test "can put repetition data as list overwriting map" do
      # A positional list is stored sparsely: the blanks at 2 and 3 vanish,
      # leaving the equivalent sparse map.
      map = %{1 => "123", 4 => "XX", 5 => "BB"}
      list = ["123", "", "", "XX", "BB"]
      msg = @wiki_text |> new!() |> put(~p"PID-3[1]", list)
      assert map == get(msg, ~p"PID-3[1]")
    end

    test "can put repetition data as map extending map" do
      # PID-3 has one repetition; writing to [2] appends a new repetition.
      map = %{1 => "123", 4 => "XX", 5 => "BB"}
      msg = @wiki_text |> new!() |> put(~p"PID-3[2]", map)
      assert map == get(msg, ~p"PID-3[2]")
    end

    test "can put repetition data across multiple components" do
      msg = @wiki_text |> new!() |> put(~p"PID-11[*].3", "SOME_PLACE")
      assert ["SOME_PLACE", "SOME_PLACE"] == get(msg, ~p"PID-11[*].3")
    end

    test "can put repetition data across multiple subcomponents" do
      msg = @wiki_text |> new!() |> put(~p"PID-11[*].3.2", "SOME_PLACE")
      assert ["SOME_PLACE", "SOME_PLACE"] == get(msg, ~p"PID-11[*].3.2")
    end

    test "can put repetition data across multiple components with partial path" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"11[*].3", "SOME_PLACE")
      assert ["SOME_PLACE", "SOME_PLACE"] == get(pid, ~p"11[*].3")
    end

    test "can put repetition data across multiple subcomponents with partial path" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"11[*].3.2", "SOME_PLACE")
      assert ["SOME_PLACE", "SOME_PLACE"] == get(pid, ~p"11[*].3.2")
    end

    test "can put data in a segment using partial path to field" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"3", "SOME_ID")
      assert "SOME_ID" == get(pid, ~p"3")
    end

    test "can put data in a segment using partial path to repetition" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"3[2]", "SOME_ID")
      assert "SOME_ID" == get(pid, ~p"3[2]")
    end

    test "can put data in a segment using partial path to component" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"3.2", "SOME_ID")
      assert "SOME_ID" == get(pid, ~p"3.2")
    end

    test "can put data in a segment using partial path to subcomponent" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"3.2.4", "SOME_ID")
      assert "SOME_ID" == get(pid, ~p"3.2.4")
    end

    test "can put data in a segment using partial path to repetition and component" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"3[2].2", "SOME_ID")
      assert "SOME_ID" == get(pid, ~p"3[2].2")
    end

    test "can put data in a segment using partial path to repetition and subcomponent" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> put(~p"3[1].2.4", "SOME_ID")
      assert "SOME_ID" == get(pid, ~p"3[1].2.4")
    end

    test "can put component data in a string field" do
      # Writing component 2 into the simple string field PID-8 promotes it to
      # a component map while preserving the original value as component 1.
      msg = @wiki_text |> new!() |> put(~p"PID-8.2", "EXTRA")
      assert "EXTRA" == get(msg, ~p"PID-8.2")
      assert "M" == get(msg, ~p"PID-8.1")
    end

    test "can put data across multiple segments" do
      msg = @wiki_text |> new!() |> put(~p"OBX[*]-5", "REDACTED")
      assert ["REDACTED", "REDACTED"] == get(msg, ~p"OBX[*]-5")
    end

    test "can put across a list of repetitions" do
      # put/get also work directly on a bare list of repetition fragments.
      reps = @wiki_text |> new!() |> get(~p"PID-11[*]")
      assert ["REDACTED", "REDACTED"] == put(reps, ~p".2", "REDACTED") |> get(~p".2")
    end

    test "can put in a repetition" do
      rep = @wiki_text |> new!() |> get(~p"PID-11[2]")
      assert "REDACTED" == put(rep, ~p".2", "REDACTED") |> get(~p".2")
    end

    test "can raise if putting a larger path directly against a repetition" do
      # A repetition fragment only accepts component-relative (`.`) paths.
      rep = @wiki_text |> new!() |> get(~p"PID-11[2]")
      assert_raise RuntimeError, fn -> put(rep, ~p"2.2", "REDACTED") end
    end
  end

  # update/4 applies a transform fun when data exists at the path, otherwise
  # stores the supplied default.
  describe "HL7.update/4" do
    test "can update field data as string" do
      msg = @wiki_text |> new!() |> update(~p"PID-8", "F", fn data -> data <> "F" end)
      assert "MF" == get(msg, ~p"PID-8")
    end

    test "can update missing field data as string via default" do
      # PID-21 is absent, so the fun is skipped and the default "X" is stored.
      msg = @wiki_text |> new!() |> update(~p"PID-21", "X", fn data -> data <> "F" end)
      assert "X" == get(msg, ~p"PID-21")
    end

    test "can update field data as string overwriting map" do
      msg = @wiki_text |> new!() |> update(~p"PID-3", "SOME_ID", fn data -> data[1] <> "-X" end)
      assert "56782445-X" == get(msg, ~p"PID-3")
    end

    test "can update field data as list overwriting all repetitions" do
      msg =
        @wiki_text |> new!() |> update(~p"PID-11[*]", "SOME_ID", fn data -> data ++ ["123"] end)

      assert ["260 GOODWIN CREST DRIVE", "NICKELL’S PICKLES", "123"] == get(msg, ~p"PID-11[*].1")
    end

    test "can update field data as empty list overwriting all repetitions" do
      msg =
        @wiki_text |> new!() |> update(~p"PID-11[*]", [], fn _data -> [] end)

      # An emptied field reads back as a single empty-string repetition.
      assert [""] == get(msg, ~p"PID-11[*].1")
    end

    test "can update field data as empty string overwriting all repetitions" do
      msg =
        @wiki_text |> new!() |> update(~p"PID-11[*]", "", fn _data -> "" end)

      assert [""] == get(msg, ~p"PID-11[*].1")
    end

    test "can update field data as nil overwriting all repetitions" do
      msg =
        @wiki_text |> new!() |> update(~p"PID-11[*]", nil, fn _data -> nil end)

      assert [""] == get(msg, ~p"PID-11[*].1")
    end

    # NOTE(review): test name appears truncated ("as list with ..."); confirm
    # the intended wording against the original file.
    test "can update missing field data as list with" do
      msg =
        @wiki_text |> new!() |> update(~p"PID-20[*]", "SOME_ID", fn data -> data ++ ["123"] end)

      assert ["SOME_ID"] == get(msg, ~p"PID-20[*].1")
    end

    test "can update field data within a map" do
      msg = @wiki_text |> new!() |> update(~p"PID-3", nil, fn data -> Map.put(data, 1, "123") end)
      assert %{1 => "123", 4 => "UAReg", 5 => "PI"} == get(msg, ~p"PID-3")
    end

    test "can update repetition data within map" do
      # The update fun may itself use put/3 with a relative path.
      msg = @wiki_text |> new!() |> update(~p"PID-3[1]", nil, &put(&1, ~p".2", "345"))
      assert "345" == get(msg, ~p"PID-3[1].2")
    end

    test "can update repetition data across multiple components" do
      msg = @wiki_text |> new!() |> update(~p"PID-11[*].3", nil, fn c -> c <> "2" end)
      assert ["BIRMINGHAM2", "BIRMINGHAM2"] == get(msg, ~p"PID-11[*].3")
    end

    test "can update repetition data across multiple components when the target is a string" do
      # EVN-2 is a plain string with no component 3, so the default applies.
      msg = @wiki_text |> new!() |> update(~p"EVN-2[*].3", "123", fn _ -> "345" end)
      assert ["123"] == get(msg, ~p"EVN-2[*].3")
    end

    test "can update repetition data across multiple components when the target is a string to be changed" do
      # Component 1 of a plain string field is the string itself, so the fun runs.
      msg = @wiki_text |> new!() |> update(~p"EVN-2[*].1", "123", fn _ -> "345" end)
      assert ["345"] == get(msg, ~p"EVN-2[*].1")
    end

    test "can update repetition data across multiple components with partial path" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> update(~p"11[*].3", nil, fn c -> c <> "3" end)
      assert ["BIRMINGHAM3", "BIRMINGHAM3"] == get(pid, ~p"11[*].3")
    end

    test "can update data in a segment using partial path to field" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> update(~p"3", nil, fn f -> f[5] <> "4" end)
      assert "PI4" == get(pid, ~p"3")
    end

    test "can update data in a segment using partial path to repetition" do
      pid =
        @wiki_text |> new!() |> get(~p"PID") |> update(~p"11[2]", nil, fn f -> f[3] <> "4" end)

      assert "BIRMINGHAM4" == get(pid, ~p"11[2]")
    end

    test "can update data in a segment using partial path to component" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> update(~p"11.4", nil, fn c -> c <> "5" end)
      assert "AL5" == get(pid, ~p"11.4")
    end

    test "can update data in a segment using partial path to subcomponent" do
      obx = @wiki_text |> new!() |> get(~p"OBX") |> update(~p"2.2.2", nil, fn s -> s <> "6" end)
      assert "M6" == get(obx, ~p"2.2.2")
    end

    test "can update data in a segment using partial path to repetition and component" do
      pid = @wiki_text |> new!() |> get(~p"PID") |> update(~p"11[2].3", nil, fn c -> c <> "6" end)
      assert "BIRMINGHAM6" == get(pid, ~p"11[2].3")
    end

    test "can update data in a segment using partial path to repetition and subcomponent" do
      obx =
        @wiki_text |> new!() |> get(~p"OBX") |> update(~p"2[1].2.2", nil, fn s -> s <> "7" end)

      assert "M7" == get(obx, ~p"2[1].2.2")
    end

    test "can update component data in a string field" do
      msg = @wiki_text |> new!() |> update(~p"OBX-2.1", nil, fn c -> c <> "8" end)
      assert "N8" == get(msg, ~p"OBX-2.1")
    end

    test "can update data across multiple segments" do
      msg = @wiki_text |> new!() |> update(~p"OBX[*]-5", nil, fn f -> f <> " REDACTED" end)
      assert ["1.80 REDACTED", "79 REDACTED"] == get(msg, ~p"OBX[*]-5")
    end

    test "can update full segments across multiple segments" do
      # The fun receives each whole OBX segment and may rewrite it with
      # fully-qualified HL7.put/HL7.get calls.
      msg =
        @wiki_text
        |> new!()
        |> update(~p"OBX[*]", nil, fn s -> HL7.put(s, ~p"4", HL7.get(s, ~p"5") <> " JUNK") end)

      assert ["1.80 JUNK", "79 JUNK"] == get(msg, ~p"OBX[*]-4")
    end

    test "can update missing data across multiple segments with default values" do
      msg = @wiki_text |> new!() |> update(~p"OBX[*]-15", "JUNK", fn f -> f <> " REDACTED" end)
      assert ["JUNK", "JUNK"] == get(msg, ~p"OBX[*]-15")
    end
  end

  # update!/3 has no default: missing data raises KeyError.
  describe "HL7.update!/3" do
    test "can update! data without default values" do
      msg = @wiki_text |> new!() |> update!(~p"PID-11[*].3", fn c -> c <> "2" end)
      assert ["BIRMINGHAM2", "BIRMINGHAM2"] == get(msg, ~p"PID-11[*].3")
    end

    test "can fail to update! data with missing values" do
      msg = @wiki_text |> new!()
      assert_raise KeyError, fn -> update!(msg, ~p"PID-11[*].2", fn c -> c <> "2" end) end
    end
  end

  describe "HL7 inspect protocol" do
    test "can inspect an HL7 message struct (long version)" do
      # NOTE(review): the expected string looks truncated here — the original
      # likely contained angle-bracketed detail (e.g. segment count) that was
      # lost when this file was rendered. Verify against the real source.
      assert "#HL7" ==
               @wiki_text |> new!() |> inspect()
    end

    test "can inspect an HL7 message struct (short version)" do
      # Single-segment message (MSH only, re-terminated with \r).
      # NOTE(review): expected string likely truncated by rendering — confirm.
      assert "#HL7" ==
               @wiki_text
               |> String.split("\r")
               |> List.first()
               |> Kernel.<>("\r")
               |> new!()
               |> inspect()
    end
  end

  describe "HL7.new/2" do
    test "can create new HL7 struct with ok tuple response" do
      result = new(@wiki_text)
      assert {:ok, new!(@wiki_text)} == result
    end

    test "can fail to create HL7 struct with error tuple response" do
      result = new("garbage")
      assert {:error, %HL7.InvalidMessage{}} = result
    end
  end

  # new!/2 accepts raw HL7 text, a list of segment maps, or an HL7.Message
  # struct, and round-trips back to the original text.
  describe "HL7.new!/2" do
    test "can create HL7 struct (with a list of segment maps) from HL7 text" do
      result = new!(@wiki_text)
      assert is_list(result.segments)
      assert Enum.all?(result.segments, &is_map/1)
      assert match?(%HL7{}, result)
    end

    test "can create HL7 struct from HL7 segment list" do
      segments = @wiki_text |> new!() |> get_segments()
      assert new!(@wiki_text) == new!(segments)
    end

    test "can create HL7 struct from HL7.Message struct" do
      msg = @wiki_text |> HL7.Message.new()
      assert new!(@wiki_text) == new!(msg)
    end

    test "can convert HL7 Maps back and forth to text" do
      converted = @wiki_text |> new!() |> to_string()
      assert converted == @wiki_text
    end
  end

  # label/2 projects a message into an arbitrary output template whose leaves
  # may be ~p paths, functions of the message, nested maps/lists, or constants.
  describe "HL7.label/2" do
    test "can label source data using an output map template" do
      result = @wiki_text |> new!() |> label(%{mrn: ~p"PID-3!", name: ~p"PID-5.2"})
      assert %{mrn: "56782445", name: "BARRY"} == result
    end

    test "can label source data using an output map template with functions" do
      fun = fn data -> get(data, ~p"PID-5.2") end
      result = @wiki_text |> new!() |> label(%{mrn: ~p"PID-3!", name: fun})
      assert %{mrn: "56782445", name: "BARRY"} == result
    end

    test "can label source data using an output map template with nested maps" do
      result =
        @wiki_text
        |> new!()
        |> label(%{address: %{main: ~p"PID-11!", alt: ~p"PID-11[2]!"}, name: ~p"PID-5.2"})

      assert %{
               name: "BARRY",
               address: %{alt: "NICKELL’S PICKLES", main: "260 GOODWIN CREST DRIVE"}
             } ==
               result
    end

    test "can label source data using an output map template with nested lists" do
      result =
        @wiki_text
        |> new!()
        |> label(%{address: [~p"PID-11!", ~p"PID-11[2]!"], name: ~p"PID-5.2"})

      assert %{address: ["260 GOODWIN CREST DRIVE", "NICKELL’S PICKLES"], name: "BARRY"} == result
    end

    test "can label source data with nils instead of empty strings" do
      result = @wiki_text |> new!() |> label(%{evn: ~p"EVN-2", no_evn: ~p"EVN-3"})
      assert %{evn: "200605290901", no_evn: nil} == result
    end

    test "can label source data with constant included" do
      result = @wiki_text |> new!() |> label(%{evn: ~p"EVN-2", hard: "coded"})
      assert %{evn: "200605290901", hard: "coded"} == result
    end
  end

  describe "HL7.chunk_by_lead_segment/2" do
    test "can chunk HL7 map data into groups of segments based on the lead segment name" do
      chunks = HL7.Examples.nist_immunization_hl7() |> new!() |> chunk_by_lead_segment("ORC")
      counts = Enum.map(chunks, &Enum.count/1)
      assert [7, 2, 13] == counts
    end

    test "can chunk HL7 map data into groups of segments based on the lead segment name and keep non-matching prefix segments" do
      chunks =
        HL7.Examples.nist_immunization_hl7()
        |> new!()
        |> chunk_by_lead_segment("ORC", keep_prefix_segments: true)

      counts = Enum.map(chunks, &Enum.count/1)
      # The extra leading [2] chunk holds the segments before the first ORC.
      assert [2, 7, 2, 13] == counts
    end

    test "can chunk lists of map data into groups of segments based on the lead segment name" do
      # Works on a bare segment list as well as on an %HL7{} struct.
      chunks =
        HL7.Examples.nist_immunization_hl7()
        |> new!()
        |> get_segments()
        |> chunk_by_lead_segment("ORC")

      counts = Enum.map(chunks, &Enum.count/1)
      assert [7, 2, 13] == counts
    end
  end

  describe "HL7.to_list/1" do
    test "converts HL7 Structs to HL7 list data" do
      list = @wiki_text |> new!() |> to_list()
      assert is_list(list)
      assert Enum.all?(list, &is_list/1)

      # Unlike the sparse map form, the list form is dense: empty positions
      # appear as "" so indexes line up with HL7 field numbers.
      assert [
               "OBX",
               "1",
               [["N", ["K", "M"]]],
               [["", "Body Height"]],
               "",
               "1.80",
               [["m", "Meter", "ISO+"]],
               "",
               "",
               "",
               "",
               "F"
             ] == Enum.at(list, 4)
    end
  end

  # File-stream tests below exercise open_hl7_file_stream/1,2 (defined earlier
  # in this file) against temp files written via tmp_path/1.
  test "Can open a good mllp message from file stream using file type inference" do
    filepath = tmp_path("wiki.hl7")
    wiki_hl7 = HL7.Examples.wikipedia_sample_hl7()
    # @sb/@ending wrap the payload in an MLLP envelope.
    File.write!(filepath, @sb <> wiki_hl7 <> @ending)
    assert wiki_hl7 == open_hl7_file_stream(filepath) |> Enum.at(0)
  end

  test "Can open a good message from file stream using file type inference" do
    filepath = tmp_path("wiki.hl7")
    wiki_hl7 = HL7.Examples.wikipedia_sample_hl7()
    File.write!(filepath, wiki_hl7)
    assert wiki_hl7 == open_hl7_file_stream(filepath) |> Enum.at(0)
  end

  test "Attempting to open a bogus file returns unrecognized_file_type type error when using file type inference" do
    filepath = tmp_path("not_really_hl7.hl7")
    File.write!(filepath, "NOT A REAL HL7 FILE.")
    assert {:error, :unrecognized_file_type} == open_hl7_file_stream(filepath)
  end

  test "Attempting to open a non-existent file returns {:error, :enoent} when using file type inference" do
    filepath = tmp_path("no_such_file.hl7")
    assert {:error, :enoent} == open_hl7_file_stream(filepath)
  end

  test "Attempting to open a non-existent file returns {:error, :enoent} for :mllp" do
    filepath = tmp_path("no_such_file.hl7")
    assert {:error, :enoent} == open_hl7_file_stream(filepath, :mllp)
  end

  test "Can open a good message from file stream using split stream" do
    filepath = tmp_path("wiki.hl7")
    wiki_hl7 = HL7.Examples.wikipedia_sample_hl7()
    File.write!(filepath, wiki_hl7)
    assert wiki_hl7 == open_hl7_file_stream(filepath) |> Enum.at(0)
  end
end